mirror of https://github.com/zedeus/nitter.git synced 2025-12-06 03:55:36 -05:00

1 Commit

Author SHA1 Message Date
Zed
c9b261a793 WIP tweets/timeline parser 2022-01-30 23:38:39 +01:00
107 changed files with 2701 additions and 4434 deletions


@@ -1,4 +1,4 @@
name: Docker
name: CI/CD
on:
push:
@@ -8,56 +8,31 @@ on:
- master
jobs:
tests:
uses: ./.github/workflows/run-tests.yml
secrets: inherit
build-docker-amd64:
needs: [tests]
runs-on: buildjet-2vcpu-ubuntu-2204
build-docker:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v1
with:
version: latest
- name: Login to DockerHub
uses: docker/login-action@v2
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push AMD64 Docker image
uses: docker/build-push-action@v3
- name: Build and push
uses: docker/build-push-action@v2
with:
context: .
file: ./Dockerfile
platforms: linux/amd64
push: true
tags: zedeus/nitter:latest,zedeus/nitter:${{ github.sha }}
build-docker-arm64:
needs: [tests]
runs-on: buildjet-2vcpu-ubuntu-2204-arm
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v2
with:
version: latest
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push ARM64 Docker image
uses: docker/build-push-action@v3
with:
context: .
file: ./Dockerfile.arm64
platforms: linux/arm64
push: true
tags: zedeus/nitter:latest-arm64,zedeus/nitter:${{ github.sha }}-arm64


@@ -1,108 +0,0 @@
name: Tests
on:
push:
paths-ignore:
- "*.md"
branches-ignore:
- master
workflow_call:
# Ensure that multiple runs on the same branch do not overlap.
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
build-test:
name: Build and test
runs-on: buildjet-2vcpu-ubuntu-2204
strategy:
matrix:
nim: ["2.0.x", "2.2.x", "devel"]
steps:
- name: Checkout Code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Cache Nimble Dependencies
id: cache-nimble
uses: buildjet/cache@v4
with:
path: ~/.nimble
key: ${{ matrix.nim }}-nimble-v2-${{ hashFiles('*.nimble') }}
restore-keys: |
${{ matrix.nim }}-nimble-v2-
- name: Setup Nim
uses: jiro4989/setup-nim-action@v2
with:
nim-version: ${{ matrix.nim }}
use-nightlies: true
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Build Project
run: nimble build -d:release -Y
integration-test:
needs: [build-test]
name: Integration test
runs-on: buildjet-2vcpu-ubuntu-2204
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Cache Nimble Dependencies
id: cache-nimble
uses: buildjet/cache@v4
with:
path: ~/.nimble
key: devel-nimble-v2-${{ hashFiles('*.nimble') }}
restore-keys: |
devel-nimble-v2-
- name: Setup Python (3.10) with pip cache
uses: buildjet/setup-python@v4
with:
python-version: "3.10"
cache: pip
- name: Setup Nim
uses: jiro4989/setup-nim-action@v2
with:
nim-version: devel
use-nightlies: true
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Build Project
run: nimble build -d:release -Y
- name: Install SeleniumBase and Chromedriver
run: |
pip install seleniumbase
seleniumbase install chromedriver
- name: Start Redis Service
uses: supercharge/redis-github-action@1.5.0
- name: Prepare Nitter Environment
run: |
sudo apt-get update && sudo apt-get install -y libsass-dev
cp nitter.example.conf nitter.conf
sed -i 's/enableDebug = false/enableDebug = true/g' nitter.conf
nimble md
nimble scss
echo '${{ secrets.SESSIONS }}' | head -n1
echo '${{ secrets.SESSIONS }}' > ./sessions.jsonl
- name: Run Tests
run: |
./nitter &
pytest -n1 tests

.gitignore

@@ -3,13 +3,9 @@ nitter
*.db
/tests/__pycache__
/tests/geckodriver.log
/tests/downloaded_files
/tests/latest_logs
/tests/downloaded_files/*
/tools/gencss
/tools/rendermd
/public/css/style.css
/public/md/*.html
nitter.conf
guest_accounts.json*
sessions.json*
dump.rdb


@@ -1,5 +1,6 @@
FROM nimlang/nim:2.2.0-alpine-regular as nim
FROM nimlang/nim:1.6.2-alpine-regular as nim
LABEL maintainer="setenforce@protonmail.com"
EXPOSE 8080
RUN apk --no-cache add libsass-dev pcre
@@ -9,17 +10,14 @@ COPY nitter.nimble .
RUN nimble install -y --depsOnly
COPY . .
RUN nimble build -d:danger -d:lto -d:strip --mm:refc \
RUN nimble build -d:danger -d:lto -d:strip \
&& nimble scss \
&& nimble md
FROM alpine:latest
WORKDIR /src/
RUN apk --no-cache add pcre ca-certificates
RUN apk --no-cache add pcre
COPY --from=nim /src/nitter/nitter ./
COPY --from=nim /src/nitter/nitter.example.conf ./nitter.conf
COPY --from=nim /src/nitter/public ./public
EXPOSE 8080
RUN adduser -h /src/ -D -s /bin/sh nitter
USER nitter
CMD ./nitter


@@ -1,25 +0,0 @@
FROM alpine:3.20.6 as nim
LABEL maintainer="setenforce@protonmail.com"
RUN apk --no-cache add libsass-dev pcre gcc git libc-dev nim nimble
WORKDIR /src/nitter
COPY nitter.nimble .
RUN nimble install -y --depsOnly
COPY . .
RUN nimble build -d:danger -d:lto -d:strip --mm:refc \
&& nimble scss \
&& nimble md
FROM alpine:3.20.6
WORKDIR /src/
RUN apk --no-cache add pcre ca-certificates openssl
COPY --from=nim /src/nitter/nitter ./
COPY --from=nim /src/nitter/nitter.example.conf ./nitter.conf
COPY --from=nim /src/nitter/public ./public
EXPOSE 8080
RUN adduser -h /src/ -D -s /bin/sh nitter
USER nitter
CMD ./nitter


@@ -1,38 +1,29 @@
# Nitter
[![Test Matrix](https://github.com/zedeus/nitter/workflows/Tests/badge.svg)](https://github.com/zedeus/nitter/actions/workflows/run-tests.yml)
[![Test Matrix](https://github.com/zedeus/nitter/workflows/Docker/badge.svg)](https://github.com/zedeus/nitter/actions/workflows/build-docker.yml)
[![Test Matrix](https://github.com/zedeus/nitter/workflows/CI/CD/badge.svg)](https://github.com/zedeus/nitter/actions?query=workflow%3ACI/CD)
[![License](https://img.shields.io/github/license/zedeus/nitter?style=flat)](#license)
> [!NOTE]
> Running a Nitter instance now requires real accounts, since Twitter removed the previous methods. \
> This does not affect users. \
> For instructions on how to obtain session tokens, see [Creating session tokens](https://github.com/zedeus/nitter/wiki/Creating-session-tokens).
A free and open source alternative Twitter front-end focused on privacy and
performance. \
Inspired by the [Invidious](https://github.com/iv-org/invidious) project.
Inspired by the [Invidious](https://github.com/iv-org/invidious)
project.
- No JavaScript or ads
- All requests go through the backend, client never talks to Twitter
- Prevents Twitter from tracking your IP or JavaScript fingerprint
- Uses Twitter's unofficial API (no developer account required)
- Uses Twitter's unofficial API (no rate limits or developer account required)
- Lightweight (for [@nim_lang](https://nitter.net/nim_lang), 60KB vs 784KB from twitter.com)
- RSS feeds
- Themes
- Mobile support (responsive design)
- AGPLv3 licensed, no proprietary instances permitted
<details>
<summary>Donations</summary>
Liberapay: https://liberapay.com/zedeus<br>
Patreon: https://patreon.com/nitter<br>
BTC: bc1qpqpzjkcpgluhzf7x9yqe7jfe8gpfm5v08mdr55<br>
ETH: 0x24a0DB59A923B588c7A5EBd0dBDFDD1bCe9c4460<br>
XMR: 42hKayRoEAw4D6G6t8mQHPJHQcXqofjFuVfavqKeNMNUZfeJLJAcNU19i1bGdDvcdN6romiSscWGWJCczFLe9RFhM3d1zpL<br>
SOL: ANsyGNXFo6osuFwr1YnUqif2RdoYRhc27WdyQNmmETSW<br>
ZEC: u1vndfqtzyy6qkzhkapxelel7ams38wmfeccu3fdpy2wkuc4erxyjm8ncjhnyg747x6t0kf0faqhh2hxyplgaum08d2wnj4n7cyu9s6zhxkqw2aef4hgd4s6vh5hpqvfken98rg80kgtgn64ff70djy7s8f839z00hwhuzlcggvefhdlyszkvwy3c7yw623vw3rvar6q6evd3xcvveypt
</details>
Liberapay: https://liberapay.com/zedeus \
Patreon: https://patreon.com/nitter \
BTC: bc1qp7q4qz0fgfvftm5hwz3vy284nue6jedt44kxya \
ETH: 0x66d84bc3fd031b62857ad18c62f1ba072b011925 \
LTC: ltc1qhsz5nxw6jw9rdtw9qssjeq2h8hqk2f85rdgpkr \
XMR: 42hKayRoEAw4D6G6t8mQHPJHQcXqofjFuVfavqKeNMNUZfeJLJAcNU19i1bGdDvcdN6romiSscWGWJCczFLe9RFhM3d1zpL
## Roadmap
@@ -43,20 +34,19 @@ ZEC: u1vndfqtzyy6qkzhkapxelel7ams38wmfeccu3fdpy2wkuc4erxyjm8ncjhnyg747x6t0kf0faq
## Resources
The wiki contains
The wiki contains
[a list of instances](https://github.com/zedeus/nitter/wiki/Instances) and
[browser extensions](https://github.com/zedeus/nitter/wiki/Extensions)
maintained by the community.
## Why?
It's impossible to use Twitter without JavaScript enabled, and as of 2024 you
need to sign up. For privacy-minded folks, preventing JavaScript analytics and
IP-based tracking is important, but apart from using a VPN and uBlock/uMatrix,
it's impossible. Despite being behind a VPN and using heavy-duty adblockers,
you can get accurately tracked with your [browser's
fingerprint](https://restoreprivacy.com/browser-fingerprinting/), [no
JavaScript required](https://noscriptfingerprint.com/). This all became
It's impossible to use Twitter without JavaScript enabled. For privacy-minded
folks, preventing JavaScript analytics and IP-based tracking is important, but
apart from using a VPN and uBlock/uMatrix, it's impossible. Despite being behind
a VPN and using heavy-duty adblockers, you can get accurately tracked with your
[browser's fingerprint](https://restoreprivacy.com/browser-fingerprinting/),
[no JavaScript required](https://noscriptfingerprint.com/). This all became
particularly important after Twitter [removed the
ability](https://www.eff.org/deeplinks/2020/04/twitter-removes-privacy-option-and-shows-why-we-need-strong-privacy-laws)
for users to control whether their data gets sent to advertisers.
@@ -77,24 +67,21 @@ Twitter account.
## Installation
### Dependencies
- libpcre
- libsass
- redis/valkey
* libpcre
* libsass
* redis
To compile Nitter you need a Nim installation, see
[nim-lang.org](https://nim-lang.org/install.html) for details. It is possible
to install it system-wide or in the user directory you create below.
[nim-lang.org](https://nim-lang.org/install.html) for details. It is possible to
install it system-wide or in the user directory you create below.
To compile the scss files, you need to install `libsass`. On Ubuntu and Debian,
you can use `libsass-dev`.
Redis is required for caching and in the future for account info. As of 2024
Redis is no longer open source, so using the fork Valkey is recommended. It
should be available on most distros as `redis` or `redis-server`
(Ubuntu/Debian), or `valkey`/`valkey-server`. Running it with the default
config is fine, Nitter's default config is set to use the default port and
localhost.
Redis is required for caching and in the future for account info. It should be
available on most distros as `redis` or `redis-server` (Ubuntu/Debian).
Running it with the default config is fine, Nitter's default config is set to
use the default Redis port and localhost.
Here's how to create a `nitter` user, clone the repo, and build the project
along with the scss and md files.
@@ -104,7 +91,7 @@ along with the scss and md files.
# su nitter
$ git clone https://github.com/zedeus/nitter
$ cd nitter
$ nimble build -d:danger --mm:refc
$ nimble build -d:release
$ nimble scss
$ nimble md
$ cp nitter.example.conf nitter.conf
@@ -121,32 +108,25 @@ performance reasons.
### Docker
Page for the Docker image: https://hub.docker.com/r/zedeus/nitter
#### NOTE: For ARM64 support, please use the separate ARM64 docker image: [`zedeus/nitter:latest-arm64`](https://hub.docker.com/r/zedeus/nitter/tags).
#### NOTE: For ARM64/ARM support, please use [unixfox's image](https://quay.io/repository/unixfox/nitter?tab=tags), more info [here](https://github.com/zedeus/nitter/issues/399#issuecomment-997263495)
To run Nitter with Docker, you'll need to install and run Redis separately
before you can run the container. See below for how to also run Redis using
Docker.
To build and run Nitter in Docker:
```bash
docker build -t nitter:latest .
docker run -v $(pwd)/nitter.conf:/src/nitter.conf -d --network host nitter:latest
```
Note: For ARM64, use this Dockerfile: [`Dockerfile.arm64`](https://github.com/zedeus/nitter/blob/master/Dockerfile.arm64).
A prebuilt Docker image is provided as well:
```bash
docker run -v $(pwd)/nitter.conf:/src/nitter.conf -d --network host zedeus/nitter:latest
```
Using docker-compose to run both Nitter and Redis as different containers:
Change `redisHost` from `localhost` to `nitter-redis` in `nitter.conf`, then run:
```bash
docker-compose up -d
```


@@ -1,13 +1,17 @@
--define:ssl
--define:useStdLib
--threads:off
# workaround httpbeast file upload bug
--assertions:off
# disable annoying warnings
warning("GcUnsafe2", off)
warning("HoleEnumConv", off)
hint("XDeclaredButNotUsed", off)
hint("XCannotRaiseY", off)
hint("User", off)
const
nimVersion = (major: NimMajor, minor: NimMinor, patch: NimPatch)
when nimVersion >= (1, 6, 0):
warning("HoleEnumConv", off)


@@ -8,22 +8,10 @@ services:
ports:
- "127.0.0.1:8080:8080" # Replace with "8080:8080" if you don't use a reverse proxy
volumes:
- ./nitter.conf:/src/nitter.conf:Z,ro
- ./sessions.jsonl:/src/sessions.jsonl:Z,ro # Run get_sessions.py to get the credentials
- ./nitter.conf:/src/nitter.conf:ro
depends_on:
- nitter-redis
restart: unless-stopped
healthcheck:
test: wget -nv --tries=1 --spider http://127.0.0.1:8080/Jack/status/20 || exit 1
interval: 30s
timeout: 5s
retries: 2
user: "998:998"
read_only: true
security_opt:
- no-new-privileges:true
cap_drop:
- ALL
nitter-redis:
image: redis:6-alpine
@@ -32,17 +20,6 @@ services:
volumes:
- nitter-redis:/data
restart: unless-stopped
healthcheck:
test: redis-cli ping
interval: 30s
timeout: 5s
retries: 2
user: "999:1000"
read_only: true
security_opt:
- no-new-privileges:true
cap_drop:
- ALL
volumes:
nitter-redis:


@@ -1,11 +1,11 @@
[Server]
hostname = "nitter.net" # for generating links, change this to your own domain/ip
title = "nitter"
address = "0.0.0.0"
port = 8080
https = false # disable to enable cookies when not using https
httpMaxConnections = 100
staticDir = "./public"
title = "nitter"
hostname = "nitter.net"
[Cache]
listMinutes = 240 # how long to cache list info (not the tweets, so keep it high)
@@ -13,9 +13,9 @@ rssMinutes = 10 # how long to cache rss queries
redisHost = "localhost" # Change to "nitter-redis" if using docker-compose
redisPort = 6379
redisPassword = ""
redisConnections = 20 # minimum open connections in pool
redisConnections = 20 # connection pool size
redisMaxConnections = 30
# new connections are opened when none are available, but if the pool size
# max, new connections are opened when none are available, but if the pool size
# goes above this, they're closed when released. don't worry about this unless
# you receive tons of requests per second
@@ -23,18 +23,23 @@ redisMaxConnections = 30
hmacKey = "secretkey" # random key for cryptographic signing of video urls
base64Media = false # use base64 encoding for proxied media urls
enableRSS = true # set this to false to disable RSS feeds
enableDebug = false # enable request logs and debug endpoints (/.sessions)
enableDebug = false # enable request logs and debug endpoints
proxy = "" # http/https url, SOCKS proxies are not supported
proxyAuth = ""
apiProxy = "" # nitter-proxy host, e.g. localhost:7000
disableTid = false # enable this if cookie-based auth is failing
tokenCount = 10
# minimum amount of usable tokens. tokens are used to authorize API requests,
# but they expire after ~1 hour, and have a limit of 187 requests.
# the limit gets reset every 15 minutes, and the pool is filled up so there's
# always at least $tokenCount usable tokens. again, only increase this if
# you receive major bursts all the time
# Change default preferences here, see src/prefs_impl.nim for a complete list
[Preferences]
theme = "Nitter"
replaceTwitter = "nitter.net"
replaceYouTube = "piped.video"
replaceYouTube = "piped.kavin.rocks"
replaceReddit = "teddit.net"
replaceInstagram = ""
proxyVideos = true
hlsPlayback = false
infiniteScroll = false
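
The `[Cache]` section above is plain INI-style key/value config. A minimal sketch of how the Redis settings could be read with Nim's std/parsecfg (Nitter's actual config loader may handle defaults and typing differently):

```nim
# Sketch only: read the Redis settings shown above from nitter.conf using
# std/parsecfg. Section/key names follow the example config; no error handling.
import std/[parsecfg, strutils]

let cfg = loadConfig("nitter.conf")
let
  redisHost = cfg.getSectionValue("Cache", "redisHost")            # "localhost"
  redisPort = cfg.getSectionValue("Cache", "redisPort").parseInt   # 6379
  poolSize  = cfg.getSectionValue("Cache", "redisConnections").parseInt

echo "redis at ", redisHost, ":", redisPort, " (pool size ", poolSize, ")"
```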


@@ -10,20 +10,20 @@ bin = @["nitter"]
# Dependencies
requires "nim >= 2.0.0"
requires "jester#baca3f"
requires "karax#5cf360c"
requires "sass#7dfdd03"
requires "nimcrypto#a079df9"
requires "markdown#158efe3"
requires "packedjson#9e6fbb6"
requires "supersnappy#6c94198"
requires "nim >= 1.4.8"
requires "jester >= 0.5.0"
requires "karax#c71bc92"
requires "sass#e683aa1"
requires "nimcrypto#a5742a9"
requires "markdown#abdbe5e"
requires "packedjson#d11d167"
requires "supersnappy#2.1.1"
requires "redpool#8b7c1db"
requires "https://github.com/zedeus/redis#d0a0e6f"
requires "zippy#ca5989a"
requires "flatty#e668085"
requires "jsony#1de1f08"
requires "oauth#b8c163b"
requires "zippy#0.7.3"
requires "flatty#0.2.3"
requires "jsony#d0e69bd"
# Tasks


@@ -1,138 +1,53 @@
@font-face {
font-family: "fontello";
src: url("/fonts/fontello.eot?77185648");
src:
url("/fonts/fontello.eot?77185648#iefix") format("embedded-opentype"),
url("/fonts/fontello.woff2?77185648") format("woff2"),
url("/fonts/fontello.woff?77185648") format("woff"),
url("/fonts/fontello.ttf?77185648") format("truetype"),
url("/fonts/fontello.svg?77185648#fontello") format("svg");
font-family: 'fontello';
src: url('/fonts/fontello.eot?21002321');
src: url('/fonts/fontello.eot?21002321#iefix') format('embedded-opentype'),
url('/fonts/fontello.woff2?21002321') format('woff2'),
url('/fonts/fontello.woff?21002321') format('woff'),
url('/fonts/fontello.ttf?21002321') format('truetype'),
url('/fonts/fontello.svg?21002321#fontello') format('svg');
font-weight: normal;
font-style: normal;
}
[class^="icon-"]:before,
[class*=" icon-"]:before {
[class^="icon-"]:before, [class*=" icon-"]:before {
font-family: "fontello";
font-style: normal;
font-weight: normal;
speak: never;
display: inline-block;
text-decoration: inherit;
width: 1em;
margin-right: 0.2em;
text-align: center;
/* For safety - reset parent styles, that can break glyph codes*/
font-variant: normal;
text-transform: none;
/* fix buttons height, for twitter bootstrap */
line-height: 1em;
/* Font smoothing. That was taken from TWBS */
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
.icon-views:before {
content: "\e800";
}
/* '' */
.icon-heart:before {
content: "\e801";
}
/* '' */
.icon-quote:before {
content: "\e802";
}
/* '' */
.icon-comment:before {
content: "\e803";
}
/* '' */
.icon-play:before {
content: "\e805";
}
/* '' */
.icon-link:before {
content: "\e806";
}
/* '' */
.icon-calendar:before {
content: "\e807";
}
/* '' */
.icon-location:before {
content: "\e808";
}
/* '' */
.icon-picture:before {
content: "\e809";
}
/* '' */
.icon-lock:before {
content: "\e80a";
}
/* '' */
.icon-down:before {
content: "\e80b";
}
/* '' */
.icon-retweet:before {
content: "\e80c";
}
/* '' */
.icon-search:before {
content: "\e80d";
}
/* '' */
.icon-pin:before {
content: "\e80e";
}
/* '' */
.icon-cog:before {
content: "\e80f";
}
/* '' */
.icon-rss:before {
content: "\e810";
}
/* '' */
.icon-ok:before {
content: "\e811";
}
/* '' */
.icon-circle:before {
content: "\f111";
}
/* '' */
.icon-info:before {
content: "\f128";
}
/* '' */
.icon-bird:before {
content: "\f309";
}
/* '' */
.icon-heart:before { content: '\2665'; } /* '♥' */
.icon-quote:before { content: '\275e'; } /* '❞' */
.icon-comment:before { content: '\e802'; } /* '' */
.icon-ok:before { content: '\e803'; } /* '' */
.icon-play:before { content: '\e804'; } /* '' */
.icon-link:before { content: '\e805'; } /* '' */
.icon-calendar:before { content: '\e806'; } /* '' */
.icon-location:before { content: '\e807'; } /* '' */
.icon-picture:before { content: '\e809'; } /* '' */
.icon-lock:before { content: '\e80a'; } /* '' */
.icon-down:before { content: '\e80b'; } /* '' */
.icon-retweet:before { content: '\e80d'; } /* '' */
.icon-search:before { content: '\e80e'; } /* '' */
.icon-pin:before { content: '\e80f'; } /* '' */
.icon-cog:before { content: '\e812'; } /* '' */
.icon-rss-feed:before { content: '\e813'; } /* '' */
.icon-info:before { content: '\f128'; } /* '' */
.icon-bird:before { content: '\f309'; } /* '' */


@@ -1,41 +0,0 @@
body {
--bg_color: #282a36;
--fg_color: #f8f8f2;
--fg_faded: #818eb6;
--fg_dark: var(--fg_faded);
--fg_nav: var(--accent);
--bg_panel: #343746;
--bg_elements: #292b36;
--bg_overlays: #44475a;
--bg_hover: #2f323f;
--grey: var(--fg_faded);
--dark_grey: #44475a;
--darker_grey: #3d4051;
--darkest_grey: #363948;
--border_grey: #44475a;
--accent: #bd93f9;
--accent_light: #caa9fa;
--accent_dark: var(--accent);
--accent_border: #ff79c696;
--play_button: #ffb86c;
--play_button_hover: #ffc689;
--more_replies_dots: #bd93f9;
--error_red: #ff5555;
--verified_blue: var(--accent);
--icon_text: ##F8F8F2;
--tab: #6272a4;
--tab_selected: var(--accent);
--profile_stat: #919cbf;
}
.search-bar > form input::placeholder{
color: var(--fg_faded);
}


@@ -1,15 +1,6 @@
Font license info
## Modern Pictograms
Copyright (c) 2012 by John Caserta. All rights reserved.
Author: John Caserta
License: SIL (http://scripts.sil.org/OFL)
Homepage: http://thedesignoffice.org/project/modern-pictograms/
## Entypo
Copyright (C) 2012 by Daniel Bruce
@@ -46,3 +37,12 @@ Font license info
Homepage: http://aristeides.com/
## Modern Pictograms
Copyright (c) 2012 by John Caserta. All rights reserved.
Author: John Caserta
License: SIL (http://scripts.sil.org/OFL)
Homepage: http://thedesignoffice.org/project/modern-pictograms/

Binary file not shown.


@@ -1,26 +1,26 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg">
<metadata>Copyright (C) 2025 by original authors @ fontello.com</metadata>
<metadata>Copyright (C) 2020 by original authors @ fontello.com</metadata>
<defs>
<font id="fontello" horiz-adv-x="1000" >
<font-face font-family="fontello" font-weight="400" font-stretch="normal" units-per-em="1000" ascent="850" descent="-150" />
<missing-glyph horiz-adv-x="1000" />
<glyph glyph-name="views" unicode="&#xe800;" d="M180 516l0-538-180 0 0 538 180 0z m250-138l0-400-180 0 0 400 180 0z m250 344l0-744-180 0 0 744 180 0z" horiz-adv-x="680" />
<glyph glyph-name="heart" unicode="&#x2665;" d="M790 644q70-64 70-156t-70-158l-360-330-360 330q-70 66-70 158t70 156q62 58 151 58t153-58l56-52 58 52q62 58 150 58t152-58z" horiz-adv-x="860" />
<glyph glyph-name="heart" unicode="&#xe801;" d="M790 644q70-64 70-156t-70-158l-360-330-360 330q-70 66-70 158t70 156q62 58 151 58t153-58l56-52 58 52q62 58 150 58t152-58z" horiz-adv-x="860" />
<glyph glyph-name="quote" unicode="&#x275e;" d="M18 685l335 0 0-334q0-140-98-238t-237-97l0 111q92 0 158 65t65 159l-223 0 0 334z m558 0l335 0 0-334q0-140-98-238t-237-97l0 111q92 0 158 65t65 159l-223 0 0 334z" horiz-adv-x="928" />
<glyph glyph-name="quote" unicode="&#xe802;" d="M18 685l335 0 0-334q0-140-98-238t-237-97l0 111q92 0 158 65t65 159l-223 0 0 334z m558 0l335 0 0-334q0-140-98-238t-237-97l0 111q92 0 158 65t65 159l-223 0 0 334z" horiz-adv-x="928" />
<glyph glyph-name="comment" unicode="&#xe802;" d="M1000 350q0-97-67-179t-182-130-251-48q-39 0-81 4-110-97-257-135-27-8-63-12-10-1-17 5t-10 16v1q-2 2 0 6t1 6 2 5l4 5t4 5 4 5q4 5 17 19t20 22 17 22 18 28 15 33 15 42q-88 50-138 123t-51 157q0 73 40 139t106 114 160 76 194 28q136 0 251-48t182-130 67-179z" horiz-adv-x="1000" />
<glyph glyph-name="comment" unicode="&#xe803;" d="M1000 350q0-97-67-179t-182-130-251-48q-39 0-81 4-110-97-257-135-27-8-63-12-10-1-17 5t-10 16v1q-2 2 0 6t1 6 2 5l4 5t4 5 4 5q4 5 17 19t20 22 17 22 18 28 15 33 15 42q-88 50-138 123t-51 157q0 73 40 139t106 114 160 76 194 28q136 0 251-48t182-130 67-179z" horiz-adv-x="1000" />
<glyph glyph-name="ok" unicode="&#xe803;" d="M0 260l162 162 166-164 508 510 164-164-510-510-162-162-162 164z" horiz-adv-x="1000" />
<glyph glyph-name="play" unicode="&#xe805;" d="M772 333l-741-412q-13-7-22-2t-9 20v822q0 14 9 20t22-2l741-412q13-7 13-17t-13-17z" horiz-adv-x="785.7" />
<glyph glyph-name="play" unicode="&#xe804;" d="M772 333l-741-412q-13-7-22-2t-9 20v822q0 14 9 20t22-2l741-412q13-7 13-17t-13-17z" horiz-adv-x="785.7" />
<glyph glyph-name="link" unicode="&#xe806;" d="M294 116q14 14 34 14t36-14q32-34 0-70l-42-40q-56-56-132-56-78 0-134 56t-56 132q0 78 56 134l148 148q70 68 144 77t128-43q16-16 16-36t-16-36q-36-32-70 0-50 48-132-34l-148-146q-26-26-26-64t26-62q26-26 63-26t63 26z m450 574q56-56 56-132 0-78-56-134l-158-158q-74-72-150-72-62 0-112 50-14 14-14 34t14 36q14 14 35 14t35-14q50-48 122 24l158 156q28 28 28 64 0 38-28 62-24 26-56 31t-60-21l-50-50q-16-14-36-14t-34 14q-34 34 0 70l50 50q54 54 127 51t129-61z" horiz-adv-x="800" />
<glyph glyph-name="link" unicode="&#xe805;" d="M294 116q14 14 34 14t36-14q32-34 0-70l-42-40q-56-56-132-56-78 0-134 56t-56 132q0 78 56 134l148 148q70 68 144 77t128-43q16-16 16-36t-16-36q-36-32-70 0-50 48-132-34l-148-146q-26-26-26-64t26-62q26-26 63-26t63 26z m450 574q56-56 56-132 0-78-56-134l-158-158q-74-72-150-72-62 0-112 50-14 14-14 34t14 36q14 14 35 14t35-14q50-48 122 24l158 156q28 28 28 64 0 38-28 62-24 26-56 31t-60-21l-50-50q-16-14-36-14t-34 14q-34 34 0 70l50 50q54 54 127 51t129-61z" horiz-adv-x="800" />
<glyph glyph-name="calendar" unicode="&#xe807;" d="M800 700q42 0 71-29t29-71l0-600q0-40-29-70t-71-30l-700 0q-40 0-70 30t-30 70l0 600q0 42 30 71t70 29l46 0 0-100 160 0 0 100 290 0 0-100 160 0 0 100 44 0z m0-700l0 400-700 0 0-400 700 0z m-540 800l0-170-70 0 0 170 70 0z m450 0l0-170-70 0 0 170 70 0z" horiz-adv-x="900" />
<glyph glyph-name="calendar" unicode="&#xe806;" d="M800 700q42 0 71-29t29-71l0-600q0-40-29-70t-71-30l-700 0q-40 0-70 30t-30 70l0 600q0 42 30 71t70 29l46 0 0-100 160 0 0 100 290 0 0-100 160 0 0 100 44 0z m0-700l0 400-700 0 0-400 700 0z m-540 800l0-170-70 0 0 170 70 0z m450 0l0-170-70 0 0 170 70 0z" horiz-adv-x="900" />
<glyph glyph-name="location" unicode="&#xe808;" d="M250 750q104 0 177-73t73-177q0-106-62-243t-126-223l-62-84q-10 12-27 35t-60 89-76 130-60 147-27 149q0 104 73 177t177 73z m0-388q56 0 96 40t40 96-40 95-96 39-95-39-39-95 39-96 95-40z" horiz-adv-x="500" />
<glyph glyph-name="location" unicode="&#xe807;" d="M250 750q104 0 177-73t73-177q0-106-62-243t-126-223l-62-84q-10 12-27 35t-60 89-76 130-60 147-27 149q0 104 73 177t177 73z m0-388q56 0 96 40t40 96-40 95-96 39-95-39-39-95 39-96 95-40z" horiz-adv-x="500" />
<glyph glyph-name="picture" unicode="&#xe809;" d="M357 529q0-45-31-76t-76-32-76 32-31 76 31 76 76 31 76-31 31-76z m572-215v-250h-786v107l178 179 90-89 285 285z m53 393h-893q-7 0-12-5t-6-13v-678q0-7 6-13t12-5h893q7 0 13 5t5 13v678q0 8-5 13t-13 5z m89-18v-678q0-37-26-63t-63-27h-893q-36 0-63 27t-26 63v678q0 37 26 63t63 27h893q37 0 63-27t26-63z" horiz-adv-x="1071.4" />
@@ -28,23 +28,19 @@
<glyph glyph-name="down" unicode="&#xe80b;" d="M939 399l-414-413q-10-11-25-11t-25 11l-414 413q-11 11-11 26t11 25l93 92q10 11 25 11t25-11l296-296 296 296q11 11 25 11t26-11l92-92q11-11 11-25t-11-26z" horiz-adv-x="1000" />
<glyph glyph-name="retweet" unicode="&#xe80c;" d="M714 11q0-7-5-13t-13-5h-535q-5 0-8 1t-5 4-3 4-2 7 0 6v335h-107q-15 0-25 11t-11 25q0 13 8 23l179 214q11 12 27 12t28-12l178-214q9-10 9-23 0-15-11-25t-25-11h-107v-214h321q9 0 14-6l89-108q4-5 4-11z m357 232q0-13-8-23l-178-214q-12-13-28-13t-27 13l-179 214q-8 10-8 23 0 14 11 25t25 11h107v214h-322q-9 0-14 7l-89 107q-4 5-4 11 0 7 5 12t13 6h536q4 0 7-1t5-4 3-5 2-6 1-7v-334h107q14 0 25-11t10-25z" horiz-adv-x="1071.4" />
<glyph glyph-name="retweet" unicode="&#xe80d;" d="M714 11q0-7-5-13t-13-5h-535q-5 0-8 1t-5 4-3 4-2 7 0 6v335h-107q-15 0-25 11t-11 25q0 13 8 23l179 214q11 12 27 12t28-12l178-214q9-10 9-23 0-15-11-25t-25-11h-107v-214h321q9 0 14-6l89-108q4-5 4-11z m357 232q0-13-8-23l-178-214q-12-13-28-13t-27 13l-179 214q-8 10-8 23 0 14 11 25t25 11h107v214h-322q-9 0-14 7l-89 107q-4 5-4 11 0 7 5 12t13 6h536q4 0 7-1t5-4 3-5 2-6 1-7v-334h107q14 0 25-11t10-25z" horiz-adv-x="1071.4" />
<glyph glyph-name="search" unicode="&#xe80d;" d="M772 78q30-34 6-62l-46-46q-36-32-68 0l-190 190q-74-42-156-42-128 0-223 95t-95 223 90 219 218 91 224-95 96-223q0-88-46-162z m-678 358q0-88 68-156t156-68 151 63 63 153q0 88-68 155t-156 67-151-63-63-151z" horiz-adv-x="789" />
<glyph glyph-name="search" unicode="&#xe80e;" d="M772 78q30-34 6-62l-46-46q-36-32-68 0l-190 190q-74-42-156-42-128 0-223 95t-95 223 90 219 218 91 224-95 96-223q0-88-46-162z m-678 358q0-88 68-156t156-68 151 63 63 153q0 88-68 155t-156 67-151-63-63-151z" horiz-adv-x="789" />
<glyph glyph-name="pin" unicode="&#xe80e;" d="M268 368v250q0 8-5 13t-13 5-13-5-5-13v-250q0-8 5-13t13-5 13 5 5 13z m375-197q0-14-11-25t-25-10h-239l-29-270q-1-7-6-11t-11-5h-1q-15 0-17 15l-43 271h-225q-15 0-25 10t-11 25q0 69 44 124t99 55v286q-29 0-50 21t-22 50 22 50 50 22h357q29 0 50-22t21-50-21-50-50-21v-286q55 0 99-55t44-124z" horiz-adv-x="642.9" />
<glyph glyph-name="pin" unicode="&#xe80f;" d="M268 368v250q0 8-5 13t-13 5-13-5-5-13v-250q0-8 5-13t13-5 13 5 5 13z m375-197q0-14-11-25t-25-10h-239l-29-270q-1-7-6-11t-11-5h-1q-15 0-17 15l-43 271h-225q-15 0-25 10t-11 25q0 69 44 124t99 55v286q-29 0-50 21t-22 50 22 50 50 22h357q29 0 50-22t21-50-21-50-50-21v-286q55 0 99-55t44-124z" horiz-adv-x="642.9" />
<glyph glyph-name="cog" unicode="&#xe80f;" d="M911 295l-133-56q-8-22-12-31l55-133-79-79-135 53q-9-4-31-12l-55-134-112 0-56 133q-11 4-33 13l-132-55-78 79 53 134q-1 3-4 9t-6 12-4 11l-131 55 0 112 131 56 14 33-54 132 78 79 133-54q22 9 33 13l55 132 112 0 56-132q14-5 31-13l133 55 80-79-54-135q6-12 12-30l133-56 0-112z m-447-111q69 0 118 48t49 118-49 119-118 50-119-50-49-119 49-118 119-48z" horiz-adv-x="928" />
<glyph glyph-name="cog" unicode="&#xe812;" d="M911 295l-133-56q-8-22-12-31l55-133-79-79-135 53q-9-4-31-12l-55-134-112 0-56 133q-11 4-33 13l-132-55-78 79 53 134q-1 3-4 9t-6 12-4 11l-131 55 0 112 131 56 14 33-54 132 78 79 133-54q22 9 33 13l55 132 112 0 56-132q14-5 31-13l133 55 80-79-54-135q6-12 12-30l133-56 0-112z m-447-111q69 0 118 48t49 118-49 119-118 50-119-50-49-119 49-118 119-48z" horiz-adv-x="928" />
<glyph glyph-name="rss" unicode="&#xe810;" d="M184 93c0-51-43-91-93-91s-91 40-91 91c0 50 41 91 91 91s93-41 93-91z m261-85l-125 0c0 174-140 323-315 323l0 118c231 0 440-163 440-441z m259 0l-136 0c0 300-262 561-563 561l0 129c370 0 699-281 699-690z" horiz-adv-x="704" />
<glyph glyph-name="ok" unicode="&#xe811;" d="M933 534q0-22-16-38l-404-404-76-76q-16-15-38-15t-38 15l-76 76-202 202q-15 16-15 38t15 38l76 76q16 16 38 16t38-16l164-165 366 367q16 16 38 16t38-16l76-76q16-15 16-38z" horiz-adv-x="1000" />
<glyph glyph-name="circle" unicode="&#xf111;" d="M857 350q0-117-57-215t-156-156-215-58-216 58-155 156-58 215 58 215 155 156 216 58 215-58 156-156 57-215z" horiz-adv-x="857.1" />
<glyph glyph-name="rss-feed" unicode="&#xe813;" d="M184 93c0-51-43-91-93-91s-91 40-91 91c0 50 41 91 91 91s93-41 93-91z m261-85l-125 0c0 174-140 323-315 323l0 118c231 0 440-163 440-441z m259 0l-136 0c0 300-262 561-563 561l0 129c370 0 699-281 699-690z" horiz-adv-x="704" />
<glyph glyph-name="info" unicode="&#xf128;" d="M393 149v-134q0-9-7-15t-15-7h-134q-9 0-16 7t-7 15v134q0 9 7 16t16 6h134q9 0 15-6t7-16z m176 335q0-30-8-56t-20-43-31-33-32-25-34-19q-23-13-38-37t-15-37q0-10-7-18t-16-9h-134q-8 0-14 11t-6 20v26q0 46 37 87t79 60q33 16 47 32t14 42q0 24-26 41t-60 18q-36 0-60-16-20-14-60-64-7-9-17-9-7 0-14 4l-91 70q-8 6-9 14t3 16q89 148 259 148 45 0 90-17t81-46 59-72 23-88z" horiz-adv-x="571.4" />
<glyph glyph-name="bird" unicode="&#xf309;" d="M920 636q-36-54-94-98l0-24q0-130-60-250t-186-203-290-83q-160 0-290 84 14-2 46-2 132 0 234 80-62 2-110 38t-66 94q10-4 34-4 26 0 50 6-66 14-108 66t-42 120l0 2q36-20 84-24-84 58-84 158 0 48 26 94 154-188 390-196-6 18-6 42 0 78 55 133t135 55q82 0 136-58 60 12 120 44-20-66-82-104 56 8 108 30z" horiz-adv-x="920" />
</font>
</defs>
</svg>
</svg>

(image preview omitted: 6.4 KiB before, 5.9 KiB after)

Binary file not shown.

Binary file not shown.

Binary file not shown.

public/js/hls.light.min.js (new file)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -5,7 +5,7 @@ function insertBeforeLast(node, elem) {
}
function getLoadMore(doc) {
return doc.querySelector(".show-more:not(.timeline-item)");
return doc.querySelector('.show-more:not(.timeline-item)');
}
function isDuplicate(item, itemClass) {
@@ -15,19 +15,18 @@ function isDuplicate(item, itemClass) {
return document.querySelector(itemClass + " .tweet-link[href='" + href + "']") != null;
}
window.onload = function () {
window.onload = function() {
const url = window.location.pathname;
const isTweet = url.indexOf("/status/") !== -1;
const containerClass = isTweet ? ".replies" : ".timeline";
const itemClass = containerClass + " > div:not(.top-ref)";
const itemClass = containerClass + ' > div:not(.top-ref)';
var html = document.querySelector("html");
var container = document.querySelector(containerClass);
var loading = false;
function handleScroll(failed) {
window.addEventListener('scroll', function() {
if (loading) return;
if (html.scrollTop + html.clientHeight >= html.scrollHeight - 3000) {
loading = true;
var loadMore = getLoadMore(document);
@@ -36,15 +35,13 @@ window.onload = function () {
loadMore.children[0].text = "Loading...";
var url = new URL(loadMore.children[0].href);
url.searchParams.append("scroll", "true");
url.searchParams.append('scroll', 'true');
fetch(url.toString()).then(function (response) {
if (response.status === 404) throw "error";
return response.text();
}).then(function (html) {
var parser = new DOMParser();
var doc = parser.parseFromString(html, "text/html");
var doc = parser.parseFromString(html, 'text/html');
loadMore.remove();
for (var item of doc.querySelectorAll(itemClass)) {
@@ -60,18 +57,10 @@ window.onload = function () {
if (isTweet) container.appendChild(newLoadMore);
else insertBeforeLast(container, newLoadMore);
}).catch(function (err) {
console.warn("Something went wrong.", err);
if (failed > 3) {
loadMore.children[0].text = "Error";
return;
}
loading = false;
handleScroll((failed || 0) + 1);
console.warn('Something went wrong.', err);
loading = true;
});
}
}
window.addEventListener("scroll", () => handleScroll());
});
};
// @license-end


@@ -4,15 +4,15 @@ Nitter is a free and open source alternative Twitter front-end focused on
privacy and performance. The source is available on GitHub at
<https://github.com/zedeus/nitter>
- No JavaScript or ads
- All requests go through the backend, client never talks to Twitter
- Prevents Twitter from tracking your IP or JavaScript fingerprint
- Uses Twitter's unofficial API (no developer account required)
- Lightweight (for [@nim_lang](/nim_lang), 60KB vs 784KB from twitter.com)
- RSS feeds
- Themes
- Mobile support (responsive design)
- AGPLv3 licensed, no proprietary instances permitted
* No JavaScript or ads
* All requests go through the backend, client never talks to Twitter
* Prevents Twitter from tracking your IP or JavaScript fingerprint
* Uses Twitter's unofficial API (no rate limits or developer account required)
* Lightweight (for [@nim_lang](/nim_lang), 60KB vs 784KB from twitter.com)
* RSS feeds
* Themes
* Mobile support (responsive design)
* AGPLv3 licensed, no proprietary instances permitted
Nitter's GitHub wiki contains
[instances](https://github.com/zedeus/nitter/wiki/Instances) and
@@ -21,13 +21,12 @@ maintained by the community.
## Why use Nitter?
It's impossible to use Twitter without JavaScript enabled, and as of 2024 you
need to sign up. For privacy-minded folks, preventing JavaScript analytics and
IP-based tracking is important, but apart from using a VPN and uBlock/uMatrix,
it's impossible. Despite being behind a VPN and using heavy-duty adblockers,
you can get accurately tracked with your [browser's
fingerprint](https://restoreprivacy.com/browser-fingerprinting/), [no
JavaScript required](https://noscriptfingerprint.com/). This all became
It's impossible to use Twitter without JavaScript enabled. For privacy-minded
folks, preventing JavaScript analytics and IP-based tracking is important, but
apart from using a VPN and uBlock/uMatrix, it's impossible. Despite being behind
a VPN and using heavy-duty adblockers, you can get accurately tracked with your
[browser's fingerprint](https://restoreprivacy.com/browser-fingerprinting/),
[no JavaScript required](https://noscriptfingerprint.com/). This all became
particularly important after Twitter [removed the
ability](https://www.eff.org/deeplinks/2020/04/twitter-removes-privacy-option-and-shows-why-we-need-strong-privacy-laws)
for users to control whether their data gets sent to advertisers.
@@ -43,13 +42,12 @@ Twitter account.
## Donating
Liberapay: https://liberapay.com/zedeus \
Patreon: https://patreon.com/nitter \
BTC: bc1qpqpzjkcpgluhzf7x9yqe7jfe8gpfm5v08mdr55 \
ETH: 0x24a0DB59A923B588c7A5EBd0dBDFDD1bCe9c4460 \
XMR: 42hKayRoEAw4D6G6t8mQHPJHQcXqofjFuVfavqKeNMNUZfeJLJAcNU19i1bGdDvcdN6romiSscWGWJCczFLe9RFhM3d1zpL \
SOL: ANsyGNXFo6osuFwr1YnUqif2RdoYRhc27WdyQNmmETSW \
ZEC: u1vndfqtzyy6qkzhkapxelel7ams38wmfeccu3fdpy2wkuc4erxyjm8ncjhnyg747x6t0kf0faqhh2hxyplgaum08d2wnj4n7cyu9s6zhxkqw2aef4hgd4s6vh5hpqvfken98rg80kgtgn64ff70djy7s8f839z00hwhuzlcggvefhdlyszkvwy3c7yw623vw3rvar6q6evd3xcvveypt
Liberapay: <https://liberapay.com/zedeus> \
Patreon: <https://patreon.com/nitter> \
BTC: bc1qp7q4qz0fgfvftm5hwz3vy284nue6jedt44kxya \
ETH: 0x66d84bc3fd031b62857ad18c62f1ba072b011925 \
LTC: ltc1qhsz5nxw6jw9rdtw9qssjeq2h8hqk2f85rdgpkr \
XMR: 42hKayRoEAw4D6G6t8mQHPJHQcXqofjFuVfavqKeNMNUZfeJLJAcNU19i1bGdDvcdN6romiSscWGWJCczFLe9RFhM3d1zpL
## Contact


@@ -1,5 +0,0 @@
User-agent: *
Disallow: /
Crawl-delay: 1
User-agent: Twitterbot
Disallow:


@@ -1,194 +1,124 @@
# SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, httpclient, strutils, sequtils, sugar
import asyncdispatch, httpclient, uri, strutils, sequtils, sugar
import packedjson
import types, query, formatters, consts, apiutils, parser
import experimental/parser as newParser
# Helper to generate params object for GraphQL requests
proc genParams(variables: string; fieldToggles = ""): seq[(string, string)] =
result.add ("variables", variables)
result.add ("features", gqlFeatures)
if fieldToggles.len > 0:
result.add ("fieldToggles", fieldToggles)
proc apiUrl(endpoint, variables: string; fieldToggles = ""): ApiUrl =
return ApiUrl(endpoint: endpoint, params: genParams(variables, fieldToggles))
proc apiReq(endpoint, variables: string; fieldToggles = ""): ApiReq =
let url = apiUrl(endpoint, variables, fieldToggles)
return ApiReq(cookie: url, oauth: url)
proc mediaUrl(id: string; cursor: string): ApiReq =
result = ApiReq(
cookie: apiUrl(graphUserMedia, userMediaVars % [id, cursor]),
oauth: apiUrl(graphUserMediaV2, restIdVars % [id, cursor])
)
proc userTweetsUrl(id: string; cursor: string): ApiReq =
result = ApiReq(
# cookie: apiUrl(graphUserTweets, userTweetsVars % [id, cursor], userTweetsFieldToggles),
oauth: apiUrl(graphUserTweetsV2, restIdVars % [id, cursor])
)
# might change this in the future pending testing
result.cookie = result.oauth
proc userTweetsAndRepliesUrl(id: string; cursor: string): ApiReq =
let cookieVars = userTweetsAndRepliesVars % [id, cursor]
result = ApiReq(
cookie: apiUrl(graphUserTweetsAndReplies, cookieVars, userTweetsFieldToggles),
oauth: apiUrl(graphUserTweetsAndRepliesV2, restIdVars % [id, cursor])
)
proc tweetDetailUrl(id: string; cursor: string): ApiReq =
let cookieVars = tweetDetailVars % [id, cursor]
result = ApiReq(
# cookie: apiUrl(graphTweetDetail, cookieVars, tweetDetailFieldToggles),
cookie: apiUrl(graphTweet, tweetVars % [id, cursor]),
oauth: apiUrl(graphTweet, tweetVars % [id, cursor])
)
proc userUrl(username: string): ApiReq =
let cookieVars = """{"screen_name":"$1","withGrokTranslatedBio":false}""" % username
result = ApiReq(
cookie: apiUrl(graphUser, cookieVars, tweetDetailFieldToggles),
oauth: apiUrl(graphUserV2, """{"screen_name": "$1"}""" % username)
)
proc getGraphUser*(username: string): Future[User] {.async.} =
if username.len == 0: return
let js = await fetchRaw(userUrl(username))
result = parseGraphUser(js)
proc getGraphUserById*(id: string): Future[User] {.async.} =
proc getGraphUser*(id: string): Future[User] {.async.} =
if id.len == 0 or id.any(c => not c.isDigit): return
let
url = apiReq(graphUserById, """{"rest_id": "$1"}""" % id)
js = await fetchRaw(url)
variables = %*{"userId": id, "withSuperFollowsUserFields": true}
js = await fetchRaw(graphUser ? {"variables": $variables}, Api.userRestId)
result = parseGraphUser(js)
proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Profile] {.async.} =
if id.len == 0: return
let
cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
url = case kind
of TimelineKind.tweets: userTweetsUrl(id, cursor)
of TimelineKind.replies: userTweetsAndRepliesUrl(id, cursor)
of TimelineKind.media: mediaUrl(id, cursor)
js = await fetch(url)
result = parseGraphTimeline(js, after)
proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} =
if id.len == 0: return
let
cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
url = apiReq(graphListTweets, restIdVars % [id, cursor])
js = await fetch(url)
result = parseGraphTimeline(js, after).tweets
proc getGraphListBySlug*(name, list: string): Future[List] {.async.} =
let
variables = %*{"screenName": name, "listSlug": list}
url = apiReq(graphListBySlug, $variables)
js = await fetch(url)
result = parseGraphList(js)
variables = %*{"screenName": name, "listSlug": list, "withHighlightedLabel": false}
url = graphListBySlug ? {"variables": $variables}
result = parseGraphList(await fetch(url, Api.listBySlug))
proc getGraphList*(id: string): Future[List] {.async.} =
let
url = apiReq(graphListById, """{"listId": "$1"}""" % id)
js = await fetch(url)
result = parseGraphList(js)
let
variables = %*{"listId": id, "withHighlightedLabel": false}
url = graphList ? {"variables": $variables}
result = parseGraphList(await fetch(url, Api.list))
proc getGraphListMembers*(list: List; after=""): Future[Result[User]] {.async.} =
if list.id.len == 0: return
var
let
variables = %*{
"listId": list.id,
"cursor": after,
"withSuperFollowsUserFields": false,
"withBirdwatchPivots": false,
"withDownvotePerspective": false,
"withReactionsMetadata": false,
"withReactionsPerspective": false
"withReactionsPerspective": false,
"withSuperFollowsTweetFields": false
}
if after.len > 0:
variables["cursor"] = % after
let
url = apiReq(graphListMembers, $variables)
js = await fetchRaw(url)
result = parseGraphListMembers(js, after)
url = graphListMembers ? {"variables": $variables}
result = parseGraphListMembers(await fetchRaw(url, Api.listMembers), after)
proc getGraphTweetResult*(id: string): Future[Tweet] {.async.} =
proc getListTimeline*(id: string; after=""): Future[Timeline] {.async.} =
if id.len == 0: return
let
url = apiReq(graphTweetResult, """{"rest_id": "$1"}""" % id)
js = await fetch(url)
result = parseGraphTweetResult(js)
ps = genParams({"list_id": id, "ranking_mode": "reverse_chronological"}, after)
url = listTimeline ? ps
result = parseTimeline(await fetch(url, Api.timeline), after)
proc getGraphTweet(id: string; after=""): Future[Conversation] {.async.} =
proc getUser*(username: string): Future[User] {.async.} =
if username.len == 0: return
let
ps = genParams({"screen_name": username})
json = await fetchRaw(userShow ? ps, Api.userShow)
result = parseUser(json, username)
proc getUserById*(userId: string): Future[User] {.async.} =
if userId.len == 0: return
let
ps = genParams({"user_id": userId})
json = await fetchRaw(userShow ? ps, Api.userShow)
result = parseUser(json)
proc getTimeline*(id: string; after=""; replies=false): Future[Timeline] {.async.} =
if id.len == 0: return
let
cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
js = await fetch(tweetDetailUrl(id, cursor))
result = parseGraphConversation(js, id)
ps = genParams({"userId": id, "include_tweet_replies": $replies}, after)
url = timeline / (id & ".json") ? ps
result = parseTimeline(await fetch(url, Api.timeline), after)
proc getMediaTimeline*(id: string; after=""): Future[Timeline] {.async.} =
if id.len == 0: return
let url = mediaTimeline / (id & ".json") ? genParams(cursor=after)
result = parseTimeline(await fetch(url, Api.timeline), after)
proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
if name.len == 0: return
let
ps = genParams({"screen_name": name, "trim_user": "true"},
count="18", ext=false)
url = photoRail ? ps
result = parsePhotoRail(await fetch(url, Api.timeline))
proc getSearch*[T](query: Query; after=""): Future[Result[T]] {.async.} =
when T is User:
const
searchMode = ("result_filter", "user")
parse = parseUsers
fetchFunc = fetchRaw
else:
const
searchMode = ("tweet_search_mode", "live")
parse = parseTweets
fetchFunc = fetchRaw
let q = genQueryParam(query)
if q.len == 0 or q == emptyQuery:
return Result[T](beginning: true, query: query)
let url = search ? genParams(searchParams & @[("q", q), searchMode], after)
try:
result = parse(await fetchFunc(url, Api.search), after)
result.query = query
except InternalError:
return Result[T](beginning: true, query: query)
proc getTweetImpl(id: string; after=""): Future[Conversation] {.async.} =
let url = tweet / (id & ".json") ? genParams(cursor=after)
result = parseConversation(await fetch(url, Api.tweet), id)
proc getReplies*(id, after: string): Future[Result[Chain]] {.async.} =
result = (await getGraphTweet(id, after)).replies
result = (await getTweetImpl(id, after)).replies
result.beginning = after.len == 0
proc getTweet*(id: string; after=""): Future[Conversation] {.async.} =
result = await getGraphTweet(id)
result = await getTweetImpl(id)
if after.len > 0:
result.replies = await getReplies(id, after)
proc getGraphTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
let q = genQueryParam(query)
if q.len == 0 or q == emptyQuery:
return Timeline(query: query, beginning: true)
var
variables = %*{
"rawQuery": q,
"query_source": "typedQuery",
"count": 20,
"product": "Latest",
"withDownvotePerspective": false,
"withReactionsMetadata": false,
"withReactionsPerspective": false
}
if after.len > 0:
variables["cursor"] = % after
let
url = apiReq(graphSearchTimeline, $variables)
js = await fetch(url)
result = parseGraphSearch[Tweets](js, after)
result.query = query
proc getGraphUserSearch*(query: Query; after=""): Future[Result[User]] {.async.} =
if query.text.len == 0:
return Result[User](query: query, beginning: true)
var
variables = %*{
"rawQuery": query.text,
"query_source": "typedQuery",
"count": 20,
"product": "People",
"withDownvotePerspective": false,
"withReactionsMetadata": false,
"withReactionsPerspective": false
}
if after.len > 0:
variables["cursor"] = % after
result.beginning = false
let
url = apiReq(graphSearchTimeline, $variables)
js = await fetch(url)
result = parseGraphSearch[User](js, after)
result.query = query
proc getPhotoRail*(id: string): Future[PhotoRail] {.async.} =
if id.len == 0: return
let js = await fetch(mediaUrl(id, ""))
result = parseGraphPhotoRail(js)
proc getStatus*(id: string): Future[Tweet] {.async.} =
let url = status / (id & ".json") ? genParams()
result = parseStatus(await fetch(url, Api.status))
proc resolve*(url: string; prefs: Prefs): Future[string] {.async.} =
let client = newAsyncHttpClient(maxRedirects=0)
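
All of the GraphQL helpers above boil down to one request shape: an endpoint path plus `variables`, `features`, and optionally `fieldToggles` passed as URL-encoded JSON query parameters. A standalone sketch of that shape, with a made-up endpoint id and trimmed-down payloads (the real ids and feature flags live in `consts`):

```nim
# Illustrative only: mirrors what genParams/apiUrl assemble for a GraphQL call.
# The endpoint id and JSON payloads below are placeholders, not real values.
import std/[json, uri]

let
  endpoint  = "AbCdEf123/UserByScreenName"      # hypothetical endpoint id
  variables = $(%*{"screen_name": "nim_lang"})
  features  = $(%*{"some_feature_flag": true})
  url = "https://api.x.com/graphql/" & endpoint & "?" &
        encodeQuery({"variables": variables, "features": features})

echo url
```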


@@ -1,203 +1,121 @@
# SPDX-License-Identifier: AGPL-3.0-only
import httpclient, asyncdispatch, options, strutils, uri, times, math, tables
import jsony, packedjson, zippy, oauth1
import types, auth, consts, parserutils, http_pool, tid
import httpclient, asyncdispatch, options, strutils, uri
import jsony, packedjson, zippy
import types, tokens, consts, parserutils, http_pool
import experimental/types/common
const
rlRemaining = "x-rate-limit-remaining"
rlReset = "x-rate-limit-reset"
rlLimit = "x-rate-limit-limit"
errorsToSkip = {null, doesntExist, tweetNotFound, timeout, unauthorized, badRequest}
var
pool: HttpPool
disableTid: bool
apiProxy: string
var pool: HttpPool
proc setDisableTid*(disable: bool) =
disableTid = disable
proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
count="20"; ext=true): seq[(string, string)] =
result = timelineParams
for p in pars:
result &= p
if ext:
result &= ("ext", "mediaStats")
result &= ("include_ext_alt_text", "true")
result &= ("include_ext_media_availability", "true")
if count.len > 0:
result &= ("count", count)
if cursor.len > 0:
# The raw cursor often has plus signs, which sometimes get turned into spaces,
# so we need to turn them back into a plus
if " " in cursor:
result &= ("cursor", cursor.replace(" ", "+"))
else:
result &= ("cursor", cursor)
proc setApiProxy*(url: string) =
if url.len > 0:
apiProxy = url.strip(chars={'/'}) & "/"
if "http" notin apiProxy:
apiProxy = "http://" & apiProxy
proc toUrl(req: ApiReq; sessionKind: SessionKind): Uri =
case sessionKind
of oauth:
let o = req.oauth
parseUri("https://api.x.com/graphql") / o.endpoint ? o.params
of cookie:
let c = req.cookie
parseUri("https://x.com/i/api/graphql") / c.endpoint ? c.params
proc getOauthHeader(url, oauthToken, oauthTokenSecret: string): string =
let
encodedUrl = url.replace(",", "%2C").replace("+", "%20")
params = OAuth1Parameters(
consumerKey: consumerKey,
signatureMethod: "HMAC-SHA1",
timestamp: $int(round(epochTime())),
nonce: "0",
isIncludeVersionToHeader: true,
token: oauthToken
)
signature = getSignature(HttpGet, encodedUrl, "", params, consumerSecret, oauthTokenSecret)
params.signature = percentEncode(signature)
return getOauth1RequestHeader(params)["authorization"]
proc getCookieHeader(authToken, ct0: string): string =
"auth_token=" & authToken & "; ct0=" & ct0
proc genHeaders*(session: Session, url: Uri): Future[HttpHeaders] {.async.} =
proc genHeaders*(token: Token = nil): HttpHeaders =
result = newHttpHeaders({
"accept": "*/*",
"connection": "keep-alive",
"authorization": auth,
"content-type": "application/json",
"x-guest-token": if token == nil: "" else: token.tok,
"x-twitter-active-user": "yes",
"authority": "api.twitter.com",
"accept-encoding": "gzip",
"accept-language": "en-US,en;q=0.9",
"connection": "keep-alive",
"content-type": "application/json",
"origin": "https://x.com",
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/142.0.0.0 Safari/537.36",
"x-twitter-active-user": "yes",
"x-twitter-client-language": "en",
"priority": "u=1, i"
"accept": "*/*",
"DNT": "1"
})
case session.kind
of SessionKind.oauth:
result["authorization"] = getOauthHeader($url, session.oauthToken, session.oauthSecret)
of SessionKind.cookie:
result["x-twitter-auth-type"] = "OAuth2Session"
result["x-csrf-token"] = session.ct0
result["cookie"] = getCookieHeader(session.authToken, session.ct0)
result["sec-ch-ua"] = """"Google Chrome";v="142", "Chromium";v="142", "Not A(Brand";v="24""""
result["sec-ch-ua-mobile"] = "?0"
result["sec-ch-ua-platform"] = "Windows"
result["sec-fetch-dest"] = "empty"
result["sec-fetch-mode"] = "cors"
result["sec-fetch-site"] = "same-site"
if disableTid:
result["authorization"] = bearerToken2
else:
result["authorization"] = bearerToken
result["x-client-transaction-id"] = await genTid(url.path)
proc getAndValidateSession*(req: ApiReq): Future[Session] {.async.} =
result = await getSession(req)
case result.kind
of SessionKind.oauth:
if result.oauthToken.len == 0:
echo "[sessions] Empty oauth token, session: ", result.pretty
raise rateLimitError()
of SessionKind.cookie:
if result.authToken.len == 0 or result.ct0.len == 0:
echo "[sessions] Empty cookie credentials, session: ", result.pretty
raise rateLimitError()
template updateToken() =
if api != Api.search and resp.headers.hasKey(rlRemaining):
let
remaining = parseInt(resp.headers[rlRemaining])
reset = parseInt(resp.headers[rlReset])
token.setRateLimit(api, remaining, reset)
template fetchImpl(result, fetchBody) {.dirty.} =
once:
pool = HttpPool()
var token = await getToken(api)
if token.tok.len == 0:
raise rateLimitError()
try:
var resp: AsyncResponse
pool.use(await genHeaders(session, url)):
template getContent =
# TODO: this is a temporary simple implementation
if apiProxy.len > 0:
resp = await c.get(($url).replace("https://", apiProxy))
else:
resp = await c.get($url)
result = await resp.body
getContent()
pool.use(genHeaders(token)):
resp = await c.get($url)
result = await resp.body
if resp.status == $Http503:
badClient = true
raise newException(BadClientError, "Bad client")
if resp.headers.hasKey(rlRemaining):
let
remaining = parseInt(resp.headers[rlRemaining])
reset = parseInt(resp.headers[rlReset])
limit = parseInt(resp.headers[rlLimit])
session.setRateLimit(req, remaining, reset, limit)
raise newException(InternalError, result)
if result.len > 0:
if resp.headers.getOrDefault("content-encoding") == "gzip":
result = uncompress(result, dfGzip)
if result.startsWith("{\"errors"):
let errors = result.fromJson(Errors)
if errors notin errorsToSkip:
echo "Fetch error, API: ", url.path, ", errors: ", errors
if errors in {expiredToken, badToken, locked}:
invalidate(session)
raise rateLimitError()
elif errors in {rateLimited}:
# rate limit hit, resets after 24 hours
setLimited(session, req)
raise rateLimitError()
elif result.startsWith("429 Too Many Requests"):
echo "[sessions] 429 error, API: ", url.path, ", session: ", session.pretty
raise rateLimitError()
else:
echo "non-gzip body, url: ", url, ", body: ", result
fetchBody
release(token, used=true)
if resp.status == $Http400:
echo "ERROR 400, ", url.path, ": ", result
raise newException(InternalError, $url)
except InternalError as e:
raise e
except BadClientError as e:
raise e
except OSError as e:
raise e
except Exception as e:
let s = session.pretty
echo "error: ", e.name, ", msg: ", e.msg, ", session: ", s, ", url: ", url
echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
if "length" notin e.msg and "descriptor" notin e.msg:
release(token, invalid=true)
raise rateLimitError()
finally:
release(session)
template retry(bod) =
try:
bod
except RateLimitError:
echo "[sessions] Rate limited, retrying ", req.cookie.endpoint, " request..."
bod
proc fetch*(url: Uri; api: Api): Future[JsonNode] {.async.} =
var body: string
fetchImpl body:
if body.startsWith('{') or body.startsWith('['):
result = parseJson(body)
else:
echo resp.status, ": ", body, " --- url: ", url
result = newJNull()
proc fetch*(req: ApiReq): Future[JsonNode] {.async.} =
retry:
var
body: string
session = await getAndValidateSession(req)
updateToken()
let url = req.toUrl(session.kind)
let error = result.getError
if error in {invalidToken, forbidden, badToken}:
echo "fetch error: ", result.getError
release(token, invalid=true)
raise rateLimitError()
fetchImpl body:
if body.startsWith('{') or body.startsWith('['):
result = parseJson(body)
else:
echo resp.status, ": ", body, " --- url: ", url
result = newJNull()
proc fetchRaw*(url: Uri; api: Api): Future[string] {.async.} =
fetchImpl result:
if not (result.startsWith('{') or result.startsWith('[')):
echo resp.status, ": ", result, " --- url: ", url
result.setLen(0)
let error = result.getError
if error != null and error notin errorsToSkip:
echo "Fetch error, API: ", url.path, ", error: ", error
if error in {expiredToken, badToken, locked}:
invalidate(session)
raise rateLimitError()
updateToken()
proc fetchRaw*(req: ApiReq): Future[string] {.async.} =
retry:
var session = await getAndValidateSession(req)
let url = req.toUrl(session.kind)
fetchImpl result:
if not (result.startsWith('{') or result.startsWith('[')):
echo resp.status, ": ", result, " --- url: ", url
result.setLen(0)
if result.startsWith("{\"errors"):
let errors = result.fromJson(Errors)
if errors in {invalidToken, forbidden, badToken}:
echo "fetch error: ", errors
release(token, invalid=true)
raise rateLimitError()
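
The `retry` template above simply re-runs the request body once if it raises `RateLimitError`, on the assumption that a different session will be picked the second time. A self-contained sketch of that one-shot retry pattern (names and messages simplified, not Nitter's actual code):

```nim
# Minimal sketch of the one-shot retry used above: run the body again if the
# first attempt raises RateLimitError.
type RateLimitError = object of CatchableError

template retryOnce(body: untyped) =
  try:
    body
  except RateLimitError:
    echo "rate limited, retrying once..."
    body

var attempts = 0
retryOnce:
  inc attempts
  if attempts == 1:
    raise newException(RateLimitError, "rate limited")
  echo "succeeded on attempt ", attempts
```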


@@ -1,210 +0,0 @@
#SPDX-License-Identifier: AGPL-3.0-only
import std/[asyncdispatch, times, json, random, strutils, tables, packedsets, os]
import types, consts
import experimental/parser/session
# max requests at a time per session to avoid race conditions
const
maxConcurrentReqs = 2
hourInSeconds = 60 * 60
var
sessionPool: seq[Session]
enableLogging = false
template log(str: varargs[string, `$`]) =
echo "[sessions] ", str.join("")
proc endpoint(req: ApiReq; session: Session): string =
case session.kind
of oauth: req.oauth.endpoint
of cookie: req.cookie.endpoint
proc pretty*(session: Session): string =
if session.isNil:
return "<null>"
if session.id > 0 and session.username.len > 0:
result = $session.id & " (" & session.username & ")"
elif session.username.len > 0:
result = session.username
elif session.id > 0:
result = $session.id
else:
result = "<unknown>"
result = $session.kind & " " & result
proc snowflakeToEpoch(flake: int64): int64 =
int64(((flake shr 22) + 1288834974657) div 1000)
proc getSessionPoolHealth*(): JsonNode =
let now = epochTime().int
var
totalReqs = 0
limited: PackedSet[int64]
reqsPerApi: Table[string, int]
oldest = now.int64
newest = 0'i64
average = 0'i64
for session in sessionPool:
let created = snowflakeToEpoch(session.id)
if created > newest:
newest = created
if created < oldest:
oldest = created
average += created
if session.limited:
limited.incl session.id
for api in session.apis.keys:
let
apiStatus = session.apis[api]
reqs = apiStatus.limit - apiStatus.remaining
# no requests made with this session and endpoint since the limit reset
if apiStatus.reset < now:
continue
reqsPerApi.mgetOrPut($api, 0).inc reqs
totalReqs.inc reqs
if sessionPool.len > 0:
average = average div sessionPool.len
else:
oldest = 0
average = 0
return %*{
"sessions": %*{
"total": sessionPool.len,
"limited": limited.card,
"oldest": $fromUnix(oldest),
"newest": $fromUnix(newest),
"average": $fromUnix(average)
},
"requests": %*{
"total": totalReqs,
"apis": reqsPerApi
}
}
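
To make the shape of that health payload concrete, here is a hedged sketch with invented values; the "apis" keys below are placeholders, whereas in the code above they are the endpoint identifiers used as table keys.

import std/json

# Invented sample of what getSessionPoolHealth() could return.
let sample = %*{
  "sessions": {
    "total": 3, "limited": 1,
    "oldest": "2022-01-01T00:00:00Z",
    "newest": "2023-06-01T00:00:00Z",
    "average": "2022-09-15T00:00:00Z"
  },
  "requests": {
    "total": 120,
    "apis": {"UserTweets": 80, "TweetDetail": 40}
  }
}
assert sample["sessions"]["total"].getInt == 3
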
proc getSessionPoolDebug*(): JsonNode =
let now = epochTime().int
var list = newJObject()
for session in sessionPool:
let sessionJson = %*{
"apis": newJObject(),
"pending": session.pending,
}
if session.limited:
sessionJson["limited"] = %true
for api in session.apis.keys:
let
apiStatus = session.apis[api]
obj = %*{}
if apiStatus.reset > now.int:
obj["remaining"] = %apiStatus.remaining
obj["reset"] = %apiStatus.reset
if "remaining" notin obj:
continue
sessionJson{"apis", $api} = obj
list[$session.id] = sessionJson
return %list
proc rateLimitError*(): ref RateLimitError =
newException(RateLimitError, "rate limited")
proc noSessionsError*(): ref NoSessionsError =
newException(NoSessionsError, "no sessions available")
proc isLimited(session: Session; req: ApiReq): bool =
if session.isNil:
return true
let api = req.endpoint(session)
if session.limited and api != graphUserTweetsV2:
if (epochTime().int - session.limitedAt) > hourInSeconds:
session.limited = false
log "resetting limit: ", session.pretty
return false
else:
return true
if api in session.apis:
let limit = session.apis[api]
return limit.remaining <= 10 and limit.reset > epochTime().int
else:
return false
proc isReady(session: Session; req: ApiReq): bool =
not (session.isNil or session.pending > maxConcurrentReqs or session.isLimited(req))
proc invalidate*(session: var Session) =
if session.isNil: return
log "invalidating: ", session.pretty
# TODO: This isn't sufficient, but it works for now
let idx = sessionPool.find(session)
if idx > -1: sessionPool.delete(idx)
session = nil
proc release*(session: Session) =
if session.isNil: return
dec session.pending
proc getSession*(req: ApiReq): Future[Session] {.async.} =
for i in 0 ..< sessionPool.len:
if result.isReady(req): break
result = sessionPool.sample()
if not result.isNil and result.isReady(req):
inc result.pending
else:
log "no sessions available for API: ", req.cookie.endpoint
raise noSessionsError()
proc setLimited*(session: Session; req: ApiReq) =
let api = req.endpoint(session)
session.limited = true
session.limitedAt = epochTime().int
log "rate limited by api: ", api, ", reqs left: ", session.apis[api].remaining, ", ", session.pretty
proc setRateLimit*(session: Session; req: ApiReq; remaining, reset, limit: int) =
# avoid undefined behavior in race conditions
let api = req.endpoint(session)
if api in session.apis:
let rateLimit = session.apis[api]
if rateLimit.reset >= reset and rateLimit.remaining < remaining:
return
if rateLimit.reset == reset and rateLimit.remaining >= remaining:
session.apis[api].remaining = remaining
return
session.apis[api] = RateLimit(limit: limit, remaining: remaining, reset: reset)
proc initSessionPool*(cfg: Config; path: string) =
enableLogging = cfg.enableDebug
if path.endsWith(".json"):
log "ERROR: .json is not supported, the file must be a valid JSONL file ending in .jsonl"
quit 1
if not fileExists(path):
log "ERROR: ", path, " not found. This file is required to authenticate API requests."
quit 1
log "parsing JSONL account sessions file: ", path
for line in path.lines:
sessionPool.add parseSession(line)
log "successfully added ", sessionPool.len, " valid account sessions"
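
initSessionPool expects one JSON object per line. Below is a hedged sketch of parsing a single hypothetical oauth session; the keys are written to match the RawSession field names shown later in this diff, and the real file format may well use snake_case instead.

# Hypothetical sessions.jsonl line (values invented, keys assumed).
let raw = """{"kind":"oauth","username":"alice","oauthToken":"12345-abcdef","oauthTokenSecret":"secret"}"""
let s = parseSession(raw)
assert s.kind == SessionKind.oauth
assert s.id == 12345   # the digits before the '-' in the oauth token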


@@ -40,9 +40,7 @@ proc getConfig*(path: string): (Config, parseCfg.Config) =
enableRss: cfg.get("Config", "enableRSS", true),
enableDebug: cfg.get("Config", "enableDebug", false),
proxy: cfg.get("Config", "proxy", ""),
proxyAuth: cfg.get("Config", "proxyAuth", ""),
apiProxy: cfg.get("Config", "apiProxy", ""),
disableTid: cfg.get("Config", "disableTid", false)
proxyAuth: cfg.get("Config", "proxyAuth", "")
)
return (conf, cfg)


@@ -1,163 +1,59 @@
# SPDX-License-Identifier: AGPL-3.0-only
import strutils
import uri, sequtils
const
consumerKey* = "3nVuSoBZnx6U4vzUxf5w"
consumerSecret* = "Bcs59EFbbsdF6Sl9Ng71smgStWEGwXXKSjYvPVt7qys"
bearerToken* = "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA"
bearerToken2* = "Bearer AAAAAAAAAAAAAAAAAAAAAFXzAwAAAAAAMHCxpeSDG1gLNLghVe8d74hl6k4%3DRUMF4xAQLsbeBhTSRrCiQpJtxoGWeyHrDb5te2jpGskWDFW82F"
auth* = "Bearer AAAAAAAAAAAAAAAAAAAAAPYXBAAAAAAACLXUNDekMxqa8h%2F40K4moUkGsoc%3DTYfbDKbT3jJPCEVnMYqilB28NHfOPqkca3qaAxGfsyKCs0wRbw"
graphUser* = "-oaLodhGbbnzJBACb1kk2Q/UserByScreenName"
graphUserV2* = "WEoGnYB0EG1yGwamDCF6zg/UserResultByScreenNameQuery"
graphUserById* = "VN33vKXrPT7p35DgNR27aw/UserResultByIdQuery"
graphUserTweetsV2* = "6QdSuZ5feXxOadEdXa4XZg/UserWithProfileTweetsQueryV2"
graphUserTweetsAndRepliesV2* = "BDX77Xzqypdt11-mDfgdpQ/UserWithProfileTweetsAndRepliesQueryV2"
graphUserTweets* = "oRJs8SLCRNRbQzuZG93_oA/UserTweets"
graphUserTweetsAndReplies* = "kkaJ0Mf34PZVarrxzLihjg/UserTweetsAndReplies"
graphUserMedia* = "36oKqyQ7E_9CmtONGjJRsA/UserMedia"
graphUserMediaV2* = "bp0e_WdXqgNBIwlLukzyYA/MediaTimelineV2"
graphTweet* = "Y4Erk_-0hObvLpz0Iw3bzA/ConversationTimeline"
graphTweetDetail* = "YVyS4SfwYW7Uw5qwy0mQCA/TweetDetail"
graphTweetResult* = "nzme9KiYhfIOrrLrPP_XeQ/TweetResultByIdQuery"
graphSearchTimeline* = "bshMIjqDk8LTXTq4w91WKw/SearchTimeline"
graphListById* = "cIUpT1UjuGgl_oWiY7Snhg/ListByRestId"
graphListBySlug* = "K6wihoTiTrzNzSF8y1aeKQ/ListBySlug"
graphListMembers* = "fuVHh5-gFn8zDBBxb8wOMA/ListMembers"
graphListTweets* = "VQf8_XQynI3WzH6xopOMMQ/ListTimeline"
api = parseUri("https://api.twitter.com")
activate* = $(api / "1.1/guest/activate.json")
gqlFeatures* = """{
"android_ad_formats_media_component_render_overlay_enabled": false,
"android_graphql_skip_api_media_color_palette": false,
"android_professional_link_spotlight_display_enabled": false,
"blue_business_profile_image_shape_enabled": false,
"commerce_android_shop_module_enabled": false,
"creator_subscriptions_subscription_count_enabled": false,
"creator_subscriptions_tweet_preview_api_enabled": true,
"freedom_of_speech_not_reach_fetch_enabled": true,
"graphql_is_translatable_rweb_tweet_is_translatable_enabled": true,
"hidden_profile_likes_enabled": false,
"highlights_tweets_tab_ui_enabled": false,
"interactive_text_enabled": false,
"longform_notetweets_consumption_enabled": true,
"longform_notetweets_inline_media_enabled": true,
"longform_notetweets_rich_text_read_enabled": true,
"longform_notetweets_richtext_consumption_enabled": true,
"mobile_app_spotlight_module_enabled": false,
"responsive_web_edit_tweet_api_enabled": true,
"responsive_web_enhance_cards_enabled": false,
"responsive_web_graphql_exclude_directive_enabled": true,
"responsive_web_graphql_skip_user_profile_image_extensions_enabled": false,
"responsive_web_graphql_timeline_navigation_enabled": true,
"responsive_web_media_download_video_enabled": false,
"responsive_web_text_conversations_enabled": false,
"responsive_web_twitter_article_tweet_consumption_enabled": true,
"unified_cards_destination_url_params_enabled": false,
"responsive_web_twitter_blue_verified_badge_is_enabled": true,
"rweb_lists_timeline_redesign_enabled": true,
"spaces_2022_h2_clipping": true,
"spaces_2022_h2_spaces_communities": true,
"standardized_nudges_misinfo": true,
"subscriptions_verification_info_enabled": true,
"subscriptions_verification_info_reason_enabled": true,
"subscriptions_verification_info_verified_since_enabled": true,
"super_follow_badge_privacy_enabled": false,
"super_follow_exclusive_tweet_notifications_enabled": false,
"super_follow_tweet_api_enabled": false,
"super_follow_user_api_enabled": false,
"tweet_awards_web_tipping_enabled": false,
"tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled": true,
"tweetypie_unmention_optimization_enabled": false,
"unified_cards_ad_metadata_container_dynamic_card_content_query_enabled": false,
"verified_phone_label_enabled": false,
"vibe_api_enabled": false,
"view_counts_everywhere_api_enabled": true,
"premium_content_api_read_enabled": false,
"communities_web_enable_tweet_community_results_fetch": true,
"responsive_web_jetfuel_frame": true,
"responsive_web_grok_analyze_button_fetch_trends_enabled": false,
"responsive_web_grok_image_annotation_enabled": true,
"responsive_web_grok_imagine_annotation_enabled": true,
"rweb_tipjar_consumption_enabled": true,
"profile_label_improvements_pcf_label_in_post_enabled": true,
"creator_subscriptions_quote_tweet_preview_enabled": false,
"c9s_tweet_anatomy_moderator_badge_enabled": true,
"responsive_web_grok_analyze_post_followups_enabled": true,
"rweb_video_timestamps_enabled": false,
"responsive_web_grok_share_attachment_enabled": true,
"articles_preview_enabled": true,
"immersive_video_status_linkable_timestamps": false,
"articles_api_enabled": false,
"responsive_web_grok_analysis_button_from_backend": true,
"rweb_video_screen_enabled": false,
"payments_enabled": false,
"responsive_web_profile_redirect_enabled": false,
"responsive_web_grok_show_grok_translated_post": false,
"responsive_web_grok_community_note_auto_translation_is_enabled": false,
"profile_label_improvements_pcf_label_in_profile_enabled": false,
"grok_android_analyze_trend_fetch_enabled": false,
"grok_translations_community_note_auto_translation_is_enabled": false,
"grok_translations_post_auto_translation_is_enabled": false,
"grok_translations_community_note_translation_is_enabled": false,
"grok_translations_timeline_user_bio_auto_translation_is_enabled": false,
"subscriptions_feature_can_gift_premium": false,
"responsive_web_twitter_article_notes_tab_enabled": false,
"subscriptions_verification_info_is_identity_verified_enabled": false,
"hidden_profile_subscriptions_enabled": false
}""".replace(" ", "").replace("\n", "")
userShow* = api / "1.1/users/show.json"
photoRail* = api / "1.1/statuses/media_timeline.json"
status* = api / "1.1/statuses/show"
search* = api / "2/search/adaptive.json"
tweetVars* = """{
"postId": "$1",
$2
"includeHasBirdwatchNotes": false,
"includePromotedContent": false,
"withBirdwatchNotes": false,
"withVoice": false,
"withV2Timeline": true
}""".replace(" ", "").replace("\n", "")
timelineApi = api / "2/timeline"
timeline* = timelineApi / "profile"
mediaTimeline* = timelineApi / "media"
listTimeline* = timelineApi / "list.json"
tweet* = timelineApi / "conversation"
tweetDetailVars* = """{
"focalTweetId": "$1",
$2
"referrer": "profile",
"with_rux_injections": false,
"rankingMode": "Relevance",
"includePromotedContent": true,
"withCommunity": true,
"withQuickPromoteEligibilityTweetFields": true,
"withBirdwatchNotes": true,
"withVoice": true
}""".replace(" ", "").replace("\n", "")
graphql = api / "graphql"
graphUser* = graphql / "I5nvpI91ljifos1Y3Lltyg/UserByRestId"
graphList* = graphql / "JADTh6cjebfgetzvF3tQvQ/List"
graphListBySlug* = graphql / "ErWsz9cObLel1BF-HjuBlA/ListBySlug"
graphListMembers* = graphql / "Ke6urWMeCV2UlKXGRy4sow/ListMembers"
restIdVars* = """{
"rest_id": "$1", $2
"count": 20
}"""
timelineParams* = {
"include_profile_interstitial_type": "0",
"include_blocking": "0",
"include_blocked_by": "0",
"include_followed_by": "0",
"include_want_retweets": "0",
"include_mute_edge": "0",
"include_can_dm": "0",
"include_can_media_tag": "1",
"skip_status": "1",
"cards_platform": "Web-12",
"include_cards": "1",
"include_composer_source": "false",
"include_reply_count": "1",
"tweet_mode": "extended",
"include_entities": "true",
"include_user_entities": "true",
"include_ext_media_color": "false",
"send_error_codes": "true",
"simple_quoted_tweet": "true",
"include_quote_count": "true"
}.toSeq
userMediaVars* = """{
"userId": "$1", $2
"count": 20,
"includePromotedContent": false,
"withClientEventToken": false,
"withBirdwatchNotes": false,
"withVoice": true
}""".replace(" ", "").replace("\n", "")
userTweetsVars* = """{
"userId": "$1", $2
"count": 20,
"includePromotedContent": false,
"withQuickPromoteEligibilityTweetFields": true,
"withVoice": true
}""".replace(" ", "").replace("\n", "")
userTweetsAndRepliesVars* = """{
"userId": "$1", $2
"count": 20,
"includePromotedContent": false,
"withCommunity": true,
"withVoice": true
}""".replace(" ", "").replace("\n", "")
userFieldToggles = """{"withPayments":false,"withAuxiliaryUserLabels":true}"""
userTweetsFieldToggles* = """{"withArticlePlainText":false}"""
tweetDetailFieldToggles* = """{"withArticleRichContentState":true,"withArticlePlainText":false,"withGrokAnalyze":false,"withDisallowedReplyControls":false}"""
searchParams* = {
"query_source": "typed_query",
"pc": "1",
"spelling_corrections": "1"
}.toSeq
## top: nothing
## latest: "tweet_search_mode: live"
## user: "result_filter: user"
## photos: "result_filter: photos"
## videos: "result_filter: videos"
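
The $1/$2 placeholders in the *Vars templates above are positional substitution slots. A hedged sketch of filling them with strutils' % operator follows; the user id and cursor fragment are invented, and the actual call sites are not part of this diff.

import std/strutils

# Invented values; $1 receives the user id, $2 an optional cursor fragment
# (empty when requesting the first page).
let
  cursorFragment = "\"cursor\":\"HBaAwLDSkYfy1i0AAA\","
  firstPage = userTweetsVars % ["783214", ""]
  nextPage  = userTweetsVars % ["783214", cursorFragment]
# Both results are compact JSON strings suitable for the GraphQL
# "variables" query parameter.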


@@ -1,2 +1,2 @@
import parser/[user, graphql]
export user, graphql
import parser/[user, graphql, timeline]
export user, graphql, timeline


@@ -1,53 +1,11 @@
import options, strutils
import jsony
import user, utils, ../types/[graphuser, graphlistmembers]
from ../../types import User, VerifiedType, Result, Query, QueryKind
proc parseUserResult*(userResult: UserResult): User =
result = userResult.legacy
if result.verifiedType == none and userResult.isBlueVerified:
result.verifiedType = blue
if result.username.len == 0 and userResult.core.screenName.len > 0:
result.id = userResult.restId
result.username = userResult.core.screenName
result.fullname = userResult.core.name
result.userPic = userResult.avatar.imageUrl.replace("_normal", "")
if userResult.privacy.isSome:
result.protected = userResult.privacy.get.protected
if userResult.location.isSome:
result.location = userResult.location.get.location
if userResult.core.createdAt.len > 0:
result.joinDate = parseTwitterDate(userResult.core.createdAt)
if userResult.verification.isSome:
let v = userResult.verification.get
if v.verifiedType != VerifiedType.none:
result.verifiedType = v.verifiedType
if userResult.profileBio.isSome and result.bio.len == 0:
result.bio = userResult.profileBio.get.description
import user, ../types/[graphuser, graphlistmembers]
from ../../types import User, Result, Query, QueryKind
proc parseGraphUser*(json: string): User =
if json.len == 0 or json[0] != '{':
return
let
raw = json.fromJson(GraphUser)
userResult =
if raw.data.userResult.isSome: raw.data.userResult.get.result
elif raw.data.user.isSome: raw.data.user.get.result
else: UserResult()
if userResult.unavailableReason.get("") == "Suspended" or
userResult.reason.get("") == "Suspended":
return User(suspended: true)
result = parseUserResult(userResult)
let raw = json.fromJson(GraphUser)
result = toUser raw.data.user.result.legacy
result.id = raw.data.user.result.restId
proc parseGraphListMembers*(json, cursor: string): Result[User] =
result = Result[User](
@@ -63,7 +21,7 @@ proc parseGraphListMembers*(json, cursor: string): Result[User] =
of TimelineTimelineItem:
let userResult = entry.content.itemContent.userResults.result
if userResult.restId.len > 0:
result.content.add parseUserResult(userResult)
result.content.add toUser userResult.legacy
of TimelineTimelineCursor:
if entry.content.cursorType == "Bottom":
result.bottom = entry.content.value


@@ -0,0 +1,44 @@
import std/[json, strutils, times, math]
import utils
import ".."/types/[media, tweet]
from ../../types import Poll, Gif, Video, VideoVariant, VideoType
proc parseVideo*(entity: Entity): Video =
result = Video(
thumb: entity.mediaUrlHttps.getImageUrl,
views: entity.ext.mediaStats{"r", "ok", "viewCount"}.getStr,
available: entity.extMediaAvailability.status == "available",
title: entity.extAltText,
durationMs: entity.videoInfo.durationMillis,
description: entity.additionalMediaInfo.description,
variants: entity.videoInfo.variants
# playbackType: mp4
)
if entity.additionalMediaInfo.title.len > 0:
result.title = entity.additionalMediaInfo.title
proc parseGif*(entity: Entity): Gif =
result = Gif(
url: entity.videoInfo.variants[0].url.getImageUrl,
thumb: entity.getImageUrl
)
proc parsePoll*(card: Card): Poll =
let vals = card.bindingValues
# name format is pollNchoice_*
for i in '1' .. card.name[4]:
let choice = "choice" & i
result.values.add parseInt(vals{choice & "_count", "string_value"}.getStr("0"))
result.options.add vals{choice & "_label", "string_value"}.getStr
let time = vals{"end_datetime_utc", "string_value"}.getStr.parseIsoDate
if time > now():
let timeLeft = $(time - now())
result.status = timeLeft[0 ..< timeLeft.find(",")]
else:
result.status = "Final results"
result.leader = result.values.find(max(result.values))
result.votes = result.values.sum
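
As a concrete illustration of the pollNchoice_* naming convention parsePoll relies on (the sample card name is an assumption, not taken from this diff):

# A card named e.g. "poll3choice_text_only" is assumed to carry
# choice1_label/choice1_count through choice3_label/choice3_count.
let name = "poll3choice_text_only"
assert name[4] == '3'            # parsePoll iterates '1' .. name[4]
for i in '1' .. name[4]:
  echo "choice", i, "_label / choice", i, "_count"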


@@ -1,30 +0,0 @@
import std/strutils
import jsony
import ../types/session
from ../../types import Session, SessionKind
proc parseSession*(raw: string): Session =
let session = raw.fromJson(RawSession)
let kind = if session.kind == "": "oauth" else: session.kind
case kind
of "oauth":
let id = session.oauthToken[0 ..< session.oauthToken.find('-')]
result = Session(
kind: SessionKind.oauth,
id: parseBiggestInt(id),
username: session.username,
oauthToken: session.oauthToken,
oauthSecret: session.oauthTokenSecret
)
of "cookie":
let id = if session.id.len > 0: parseBiggestInt(session.id) else: 0
result = Session(
kind: SessionKind.cookie,
id: id,
username: session.username,
authToken: session.authToken,
ct0: session.ct0
)
else:
raise newException(ValueError, "Unknown session kind: " & kind)


@@ -1,15 +1,14 @@
import std/[macros, htmlgen, unicode]
import ../types/common
import ".."/../[formatters, utils]
type
ReplaceSliceKind = enum
ReplaceSliceKind* = enum
rkRemove, rkUrl, rkHashtag, rkMention
ReplaceSlice* = object
slice: Slice[int]
kind: ReplaceSliceKind
url, display: string
slice*: Slice[int]
kind*: ReplaceSliceKind
url*, display*: string
proc cmp*(x, y: ReplaceSlice): int = cmp(x.slice.a, y.slice.b)
@@ -27,11 +26,14 @@ proc dedupSlices*(s: var seq[ReplaceSlice]) =
inc j
inc i
proc extractUrls*(result: var seq[ReplaceSlice]; url: Url;
textLen: int; hideTwitter = false) =
proc extractHashtags*(result: var seq[ReplaceSlice]; slice: Slice[int]) =
result.add ReplaceSlice(kind: rkHashtag, slice: slice)
proc extractUrls*[T](result: var seq[ReplaceSlice]; entity: T;
textLen: int; hideTwitter = false) =
let
link = url.expandedUrl
slice = url.indices[0] ..< url.indices[1]
link = entity.expandedUrl
slice = entity.indices
if hideTwitter and slice.b.succ >= textLen and link.isTwitterUrl:
if slice.a < textLen:


@@ -1,8 +0,0 @@
import jsony
import ../types/tid
export TidPair
proc parseTidPairs*(raw: string): seq[TidPair] =
result = raw.fromJson(seq[TidPair])
if result.len == 0:
raise newException(ValueError, "Parsing pairs failed: " & raw)


@@ -0,0 +1,84 @@
import std/[strutils, tables, options]
import jsony
import user, tweet, utils, ../types/timeline
from ../../types import Result, User, Tweet
proc parseHook(s: string; i: var int; v: var Slice[int]) =
var slice: array[2, int]
parseHook(s, i, slice)
v = slice[0] ..< slice[1]
proc getId(id: string): string {.inline.} =
let start = id.rfind("-")
if start < 0: return id
id[start + 1 ..< id.len]
proc processTweet(id: string; objects: GlobalObjects;
userCache: var Table[string, User]): Tweet =
let raw = objects.tweets[id]
result = toTweet raw
let uid = result.user.id
if uid.len > 0 and uid in objects.users:
if uid notin userCache:
userCache[uid] = toUser objects.users[uid]
result.user = userCache[uid]
let rtId = raw.retweetedStatusIdStr
if rtId.len > 0:
if rtId in objects.tweets:
result.retweet = some processTweet(rtId, objects, userCache)
else:
result.retweet = some Tweet(id: rtId.toId)
let qId = raw.quotedStatusIdStr
if qId.len > 0:
if qId in objects.tweets:
result.quote = some processTweet(qId, objects, userCache)
else:
result.quote = some Tweet(id: qId.toId)
proc parseCursor[T](e: Entry; result: var Result[T]) =
let cursor = e.content.operation.cursor
if cursor.cursorType == "Top":
result.top = cursor.value
elif cursor.cursorType == "Bottom":
result.bottom = cursor.value
proc parseUsers*(json: string; after=""): Result[User] =
result = Result[User](beginning: after.len == 0)
let raw = json.fromJson(Search)
if raw.timeline.instructions.len == 0:
return
for e in raw.timeline.instructions[0].addEntries.entries:
let
eId = e.entryId
id = eId.getId
if eId.startsWith("user") or eId.startsWith("sq-U"):
if id in raw.globalObjects.users:
result.content.add toUser raw.globalObjects.users[id]
elif eId.startsWith("cursor") or eId.startsWith("sq-C"):
parseCursor(e, result)
proc parseTweets*(json: string; after=""): Result[Tweet] =
result = Result[Tweet](beginning: after.len == 0)
let raw = json.fromJson(Search)
if raw.timeline.instructions.len == 0:
return
var userCache: Table[string, User]
for e in raw.timeline.instructions[0].addEntries.entries:
let
eId = e.entryId
id = eId.getId
if eId.startsWith("tweet") or eId.startsWith("sq-I-t"):
if id in raw.globalObjects.tweets:
result.content.add processTweet(id, raw.globalObjects, userCache)
elif eId.startsWith("cursor") or eId.startsWith("sq-C"):
parseCursor(e, result)
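
A quick hedged sketch of the entry-id handling above; the sample ids are invented, and getId simply keeps everything after the last dash.

# In-module sketch; getId is private to this parser.
assert getId("tweet-1487123456789012") == "1487123456789012"
assert getId("sq-I-t-1487123456789012") == "1487123456789012"
assert getId("nodash") == "nodash"   # no '-' present, id returned unchanged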


@@ -0,0 +1,97 @@
import std/[strutils, options, algorithm, json]
import std/unicode except strip
import utils, slices, media, user
import ../types/tweet
from ../types/media as mediaTypes import MediaType
from ../../types import Tweet, User, TweetStats
proc expandTweetEntities(tweet: var Tweet; raw: RawTweet) =
let
orig = raw.fullText.toRunes
textRange = raw.displayTextRange
textSlice = textRange[0] .. textRange[1]
hasCard = raw.card.isSome
var replyTo = ""
if tweet.replyId > 0:
tweet.reply.add raw.inReplyToScreenName
replyTo = raw.inReplyToScreenName
var replacements = newSeq[ReplaceSlice]()
for u in raw.entities.urls:
if u.url.len == 0 or u.url notin raw.fullText:
continue
replacements.extractUrls(u, textSlice.b, hideTwitter=raw.isQuoteStatus)
# if hasCard and u.url == get(tweet.card).url:
# get(tweet.card).url = u.expandedUrl
for m in raw.entities.media:
replacements.extractUrls(m, textSlice.b, hideTwitter=true)
for hashtag in raw.entities.hashtags:
replacements.extractHashtags(hashtag.indices)
for symbol in raw.entities.symbols:
replacements.extractHashtags(symbol.indices)
for mention in raw.entities.userMentions:
let
name = mention.screenName
idx = tweet.reply.find(name)
if mention.indices.a >= textSlice.a:
replacements.add ReplaceSlice(kind: rkMention, slice: mention.indices,
url: "/" & name, display: mention.name)
if idx > -1 and name != replyTo:
tweet.reply.delete idx
elif idx == -1 and tweet.replyId != 0:
tweet.reply.add name
replacements.dedupSlices
replacements.sort(cmp)
tweet.text = orig.replacedWith(replacements, textSlice)
.strip(leading=false)
proc toTweet*(raw: RawTweet): Tweet =
result = Tweet(
id: raw.idStr.toId,
threadId: raw.conversationIdStr.toId,
replyId: raw.inReplyToStatusIdStr.toId,
time: parseTwitterDate(raw.createdAt),
hasThread: raw.selfThread.idStr.len > 0,
available: true,
user: User(id: raw.userIdStr),
stats: TweetStats(
replies: raw.replyCount,
retweets: raw.retweetCount,
likes: raw.favoriteCount,
quotes: raw.quoteCount
)
)
result.expandTweetEntities(raw)
if raw.card.isSome:
let card = raw.card.get
if "poll" in card.name:
result.poll = some parsePoll(card)
if "image" in card.name:
result.photos.add card.bindingValues{"image_large", "image_value", "url"}
.getStr.getImageUrl
# elif card.name == "amplify":
# discard
# # result.video = some(parsePromoVideo(jsCard{"binding_values"}))
# else:
# result.card = some parseCard(card, raw.entities.urls)
for m in raw.extendedEntities.media:
case m.kind
of photo: result.photos.add m.getImageUrl
of video:
result.video = some parseVideo(m)
if m.additionalMediaInfo.sourceUser.isSome:
result.attribution = some toUser get(m.additionalMediaInfo.sourceUser)
of animatedGif: result.gif = some parseGif(m)


@@ -1,12 +1,8 @@
import std/[options, tables, strutils, strformat, sugar]
import std/[options, tables, strformat]
import jsony
import user, ../types/unifiedcard
import ../../formatters
import utils
import ".."/types/[unifiedcard, media]
from ../../types import Card, CardKind, Video
from ../../utils import twimg, https
proc getImageUrl(entity: MediaEntity): string =
entity.mediaUrlHttps.dup(removePrefix(twimg), removePrefix(https))
proc parseDestination(id: string; card: UnifiedCard; result: var Card) =
let destination = card.destinationObjects[id].data
@@ -28,14 +24,6 @@ proc parseMediaDetails(data: ComponentData; card: UnifiedCard; result: var Card)
result.text = data.topicDetail.title
result.dest = "Topic"
proc parseJobDetails(data: ComponentData; card: UnifiedCard; result: var Card) =
data.destination.parseDestination(card, result)
result.kind = CardKind.jobDetails
result.title = data.title
result.text = data.shortDescriptionText
result.dest = &"@{data.profileUser.username} · {data.location}"
proc parseAppDetails(data: ComponentData; card: UnifiedCard; result: var Card) =
let app = card.appStoreData[data.appId][0]
@@ -75,20 +63,7 @@ proc parseMedia(component: Component; card: UnifiedCard; result: var Card) =
durationMs: videoInfo.durationMillis,
variants: videoInfo.variants
)
of model3d:
result.title = "Unsupported 3D model ad"
proc parseGrokShare(data: ComponentData; card: UnifiedCard; result: var Card) =
result.kind = summaryLarge
data.destination.parseDestination(card, result)
result.dest = "Answer by Grok"
for msg in data.conversationPreview:
if msg.sender == "USER":
result.title = msg.message.shorten(70)
elif msg.sender == "AGENT":
result.text = msg.message.shorten(500)
of animatedGif: discard
proc parseUnifiedCard*(json: string): Card =
let card = json.fromJson(UnifiedCard)
@@ -101,18 +76,10 @@ proc parseUnifiedCard*(json: string): Card =
component.data.parseAppDetails(card, result)
of mediaWithDetailsHorizontal:
component.data.parseMediaDetails(card, result)
of media, swipeableMedia:
of ComponentType.media, swipeableMedia:
component.parseMedia(card, result)
of buttonGroup:
discard
of grokShare:
component.data.parseGrokShare(card, result)
of ComponentType.jobDetails:
component.data.parseJobDetails(card, result)
of ComponentType.hidden:
result.kind = CardKind.hidden
of ComponentType.unknown:
echo "ERROR: Unknown component type: ", json
case component.kind
of twitterListDetails:


@@ -1,14 +1,14 @@
import std/[algorithm, unicode, re, strutils, strformat, options, nre]
import std/[algorithm, unicode, re, strutils, strformat, options]
import jsony
import utils, slices
import ../types/user as userType
from ../../types import Result, User, Error
from ../../types import User, Error
let
unRegex = re.re"(^|[^A-z0-9-_./?])@([A-z0-9_]{1,15})"
unRegex = re"(^|[^A-z0-9-_./?])@([A-z0-9_]{1,15})"
unReplace = "$1<a href=\"/$2\">@$2</a>"
htRegex = nre.re"""(*U)(^|[^\w-_.?])([#$])([\w_]*+)(?!</a>|">|#)"""
htRegex = re"(^|[^\w-_./?])([#$])([\w_]+)"
htReplace = "$1<a href=\"/search?q=%23$3\">$2$3</a>"
proc expandUserEntities(user: var User; raw: RawUser) =
@@ -29,7 +29,7 @@ proc expandUserEntities(user: var User; raw: RawUser) =
user.bio = orig.replacedWith(replacements, 0 .. orig.len)
.replacef(unRegex, unReplace)
.replace(htRegex, htReplace)
.replacef(htRegex, htReplace)
proc getBanner(user: RawUser): string =
if user.profileBannerUrl.len > 0:
@@ -56,21 +56,23 @@ proc toUser*(raw: RawUser): User =
tweets: raw.statusesCount,
likes: raw.favouritesCount,
media: raw.mediaCount,
verifiedType: raw.verifiedType,
verified: raw.verified,
protected: raw.protected,
joinDate: parseTwitterDate(raw.createdAt),
banner: getBanner(raw),
userPic: getImageUrl(raw.profileImageUrlHttps).replace("_normal", "")
)
if raw.createdAt.len > 0:
result.joinDate = parseTwitterDate(raw.createdAt)
if raw.pinnedTweetIdsStr.len > 0:
result.pinnedTweet = parseBiggestInt(raw.pinnedTweetIdsStr[0])
result.expandUserEntities(raw)
proc parseHook*(s: string; i: var int; v: var User) =
var u: RawUser
parseHook(s, i, u)
v = toUser u
proc parseUser*(json: string; username=""): User =
handleErrors:
case error.code
of suspended: return User(username: username, suspended: true)
of userNotFound: return
else: echo "[error - parseUser]: ", error
result = toUser json.fromJson(RawUser)


@@ -1,12 +1,16 @@
# SPDX-License-Identifier: AGPL-3.0-only
import std/[sugar, strutils, times]
import ../types/common
import ".."/types/[common, media, tweet]
import ../../utils as uutils
template parseTime(time: string; f: static string; flen: int): DateTime =
if time.len != flen: return
parse(time, f, utc())
proc toId*(id: string): int64 =
if id.len == 0: 0'i64
else: parseBiggestInt(id)
proc parseIsoDate*(date: string): DateTime =
date.parseTime("yyyy-MM-dd\'T\'HH:mm:ss\'Z\'", 20)
@@ -16,6 +20,9 @@ proc parseTwitterDate*(date: string): DateTime =
proc getImageUrl*(url: string): string =
url.dup(removePrefix(twimg), removePrefix(https))
proc getImageUrl*(entity: MediaEntity | Entity): string =
entity.mediaUrlHttps.getImageUrl
template handleErrors*(body) =
if json.startsWith("{\"errors"):
for error {.inject.} in json.fromJson(Errors).errors:


@@ -1,3 +1,4 @@
import jsony
from ../../types import Error
type
@@ -5,7 +6,7 @@ type
url*: string
expandedUrl*: string
displayUrl*: string
indices*: array[2, int]
indices*: Slice[int]
ErrorObj* = object
code*: Error
@@ -18,3 +19,8 @@ proc contains*(codes: set[Error]; errors: Errors): bool =
for e in errors.errors:
if e.code in codes:
return true
proc parseHook*(s: string; i: var int; v: var Slice[int]) =
var slice: array[2, int]
parseHook(s, i, slice)
v = slice[0] ..< slice[1]
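
A hedged sketch of what this hook does when jsony deserializes a two-element indices array, assuming the hook above is in scope:

import jsony

let s = "[5, 10]".fromJson(Slice[int])
assert s.a == 5 and s.b == 9   # [5, 10] becomes the half-open slice 5 ..< 10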


@@ -1,48 +1,12 @@
import options, strutils
from ../../types import User, VerifiedType
import user
type
GraphUser* = object
data*: tuple[userResult: Option[UserData], user: Option[UserData]]
data*: tuple[user: UserData]
UserData* = object
result*: UserResult
UserCore* = object
name*: string
screenName*: string
createdAt*: string
UserBio* = object
description*: string
UserAvatar* = object
imageUrl*: string
Verification* = object
verifiedType*: VerifiedType
Location* = object
location*: string
Privacy* = object
protected*: bool
UserResult* = object
legacy*: User
UserResult = object
legacy*: RawUser
restId*: string
isBlueVerified*: bool
core*: UserCore
avatar*: UserAvatar
unavailableReason*: Option[string]
reason*: Option[string]
privacy*: Option[Privacy]
profileBio*: Option[UserBio]
verification*: Option[Verification]
location*: Option[Location]
proc enumHook*(s: string; v: var VerifiedType) =
v = try:
parseEnum[VerifiedType](s)
except:
VerifiedType.none


@@ -0,0 +1,15 @@
import options
from ../../types import VideoType, VideoVariant
type
MediaType* = enum
photo, video, animatedGif
MediaEntity* = object
kind*: MediaType
mediaUrlHttps*: string
videoInfo*: Option[VideoInfo]
VideoInfo* = object
durationMillis*: int
variants*: seq[VideoVariant]


@@ -1,9 +0,0 @@
type
RawSession* = object
kind*: string
id*: string
username*: string
oauthToken*: string
oauthTokenSecret*: string
authToken*: string
ct0*: string


@@ -1,4 +0,0 @@
type
TidPair* = object
animationKey*: string
verification*: string


@@ -0,0 +1,28 @@
import std/tables
import user, tweet
type
Search* = object
globalObjects*: GlobalObjects
timeline*: Timeline
GlobalObjects* = object
users*: Table[string, RawUser]
tweets*: Table[string, RawTweet]
Timeline = object
instructions*: seq[Instructions]
Instructions = object
addEntries*: tuple[entries: seq[Entry]]
Entry* = object
entryId*: string
content*: tuple[operation: Operation]
Operation = object
cursor*: tuple[value, cursorType: string]
proc renameHook*(v: var Entity; fieldName: var string) =
if fieldName == "type":
fieldName = "kind"


@@ -0,0 +1,85 @@
import options
import jsony
from json import JsonNode
import user, media, common
type
RawTweet* = object
createdAt*: string
idStr*: string
fullText*: string
displayTextRange*: array[2, int]
entities*: Entities
extendedEntities*: ExtendedEntities
inReplyToStatusIdStr*: string
inReplyToScreenName*: string
userIdStr*: string
isQuoteStatus*: bool
replyCount*: int
retweetCount*: int
favoriteCount*: int
quoteCount*: int
conversationIdStr*: string
favorited*: bool
retweeted*: bool
selfThread*: tuple[idStr: string]
card*: Option[Card]
quotedStatusIdStr*: string
retweetedStatusIdStr*: string
Card* = object
name*: string
url*: string
bindingValues*: JsonNode
Entities* = object
hashtags*: seq[Hashtag]
symbols*: seq[Hashtag]
userMentions*: seq[UserMention]
urls*: seq[Url]
media*: seq[Entity]
Hashtag* = object
indices*: Slice[int]
UserMention* = object
screenName*: string
name*: string
indices*: Slice[int]
ExtendedEntities* = object
media*: seq[Entity]
Entity* = object
kind*: MediaType
indices*: Slice[int]
mediaUrlHttps*: string
url*: string
expandedUrl*: string
videoInfo*: VideoInfo
ext*: Ext
extMediaAvailability*: tuple[status: string]
extAltText*: string
additionalMediaInfo*: AdditionalMediaInfo
sourceStatusIdStr*: string
sourceUserIdStr*: string
AdditionalMediaInfo* = object
sourceUser*: Option[RawUser]
title*: string
description*: string
Ext* = object
mediaStats*: JsonNode
MediaStats* = object
ok*: tuple[viewCount: string]
proc renameHook*(v: var Entity; fieldName: var string) =
if fieldName == "type":
fieldName = "kind"
proc parseHook*(s: string; i: var int; v: var Slice[int]) =
var slice: array[2, int]
parseHook(s, i, slice)
v = slice[0] ..< slice[1]


@@ -1,10 +1,7 @@
import std/[options, tables, times]
import jsony
from ../../types import VideoType, VideoVariant, User
import options, tables
import media as mediaTypes
type
Text* = distinct string
UnifiedCard* = object
componentObjects*: Table[string, Component]
destinationObjects*: Table[string, Destination]
@@ -16,14 +13,10 @@ type
media
swipeableMedia
buttonGroup
jobDetails
appStoreDetails
twitterListDetails
communityDetails
mediaWithDetailsHorizontal
hidden
grokShare
unknown
Component* = object
kind*: ComponentType
@@ -34,40 +27,24 @@ type
appId*: string
mediaId*: string
destination*: string
location*: string
title*: Text
subtitle*: Text
name*: Text
memberCount*: int
mediaList*: seq[MediaItem]
topicDetail*: tuple[title: Text]
profileUser*: User
shortDescriptionText*: string
conversationPreview*: seq[GrokConversation]
MediaItem* = object
id*: string
destination*: string
Destination* = object
kind*: string
data*: tuple[urlData: UrlData]
UrlData* = object
url*: string
vanity*: string
MediaType* = enum
photo, video, model3d
MediaEntity* = object
kind*: MediaType
mediaUrlHttps*: string
videoInfo*: Option[VideoInfo]
VideoInfo* = object
durationMillis*: int
variants*: seq[VideoVariant]
Destination* = object
kind*: string
data*: tuple[urlData: UrlData]
AppType* = enum
androidApp, iPhoneApp, iPadApp
@@ -78,58 +55,13 @@ type
title*: Text
category*: Text
GrokConversation* = object
message*: string
sender*: string
Text = object
content: string
TypeField = Component | Destination | MediaEntity | AppStoreData
HasTypeField = Component | Destination | MediaEntity | AppStoreData
converter fromText*(text: Text): string = string(text)
converter fromText*(text: Text): string = text.content
proc renameHook*(v: var TypeField; fieldName: var string) =
proc renameHook*(v: var HasTypeField; fieldName: var string) =
if fieldName == "type":
fieldName = "kind"
proc enumHook*(s: string; v: var ComponentType) =
v = case s
of "details": details
of "media": media
of "swipeable_media": swipeableMedia
of "button_group": buttonGroup
of "job_details": jobDetails
of "app_store_details": appStoreDetails
of "twitter_list_details": twitterListDetails
of "community_details": communityDetails
of "media_with_details_horizontal": mediaWithDetailsHorizontal
of "commerce_drop_details": hidden
of "grok_share": grokShare
else: echo "ERROR: Unknown enum value (ComponentType): ", s; unknown
proc enumHook*(s: string; v: var AppType) =
v = case s
of "android_app": androidApp
of "iphone_app": iPhoneApp
of "ipad_app": iPadApp
else: echo "ERROR: Unknown enum value (AppType): ", s; androidApp
proc enumHook*(s: string; v: var MediaType) =
v = case s
of "video": video
of "photo": photo
of "model3d": model3d
else: echo "ERROR: Unknown enum value (MediaType): ", s; photo
proc parseHook*(s: string; i: var int; v: var DateTime) =
var str: string
parseHook(s, i, str)
v = parse(str, "yyyy-MM-dd hh:mm:ss")
proc parseHook*(s: string; i: var int; v: var Text) =
if s[i] == '"':
var str: string
parseHook(s, i, str)
v = Text(str)
else:
var t: tuple[content: string]
parseHook(s, i, t)
v = Text(t.content)


@@ -1,6 +1,6 @@
import options
import jsony
import common
from ../../types import VerifiedType
type
RawUser* = object
@@ -16,7 +16,7 @@ type
favouritesCount*: int
statusesCount*: int
mediaCount*: int
verifiedType*: VerifiedType
verified*: bool
protected*: bool
profileLinkColor*: string
profileBannerUrl*: string
@@ -42,3 +42,8 @@ type
Color* = object
red*, green*, blue*: int
proc parseHook*(s: string; i: var int; v: var Slice[int]) =
var slice: array[2, int]
parseHook(s, i, slice)
v = slice[0] ..< slice[1]


@@ -1,20 +1,19 @@
# SPDX-License-Identifier: AGPL-3.0-only
import strutils, strformat, times, uri, tables, xmltree, htmlparser, htmlgen, math
import strutils, strformat, times, uri, tables, xmltree, htmlparser, htmlgen
import std/[enumerate, re]
import types, utils, query
const
cards = "cards.twitter.com/cards"
tco = "https://t.co"
twitter = parseUri("https://x.com")
twitter = parseUri("https://twitter.com")
let
twRegex = re"(?<=(?<!\S)https:\/\/|(?<=\s))(www\.|mobile\.)?twitter\.com"
twLinkRegex = re"""<a href="https:\/\/twitter.com([^"]+)">twitter\.com(\S+)</a>"""
xRegex = re"(?<=(?<!\S)https:\/\/|(?<=\s))(www\.|mobile\.)?x\.com"
xLinkRegex = re"""<a href="https:\/\/x.com([^"]+)">x\.com(\S+)</a>"""
ytRegex = re(r"([A-z.]+\.)?youtu(be\.com|\.be)", {reStudy, reIgnoreCase})
ytRegex = re"([A-z.]+\.)?youtu(be\.com|\.be)"
igRegex = re"(www\.)?instagram\.com"
rdRegex = re"(?<![.b])((www|np|new|amp|old)\.)?reddit.com"
rdShortRegex = re"(?<![.b])redd\.it\/"
@@ -33,13 +32,10 @@ proc getUrlPrefix*(cfg: Config): string =
if cfg.useHttps: https & cfg.hostname
else: "http://" & cfg.hostname
proc shorten*(text: string; length=28): string =
result = text
proc shortLink*(text: string; length=28): string =
result = text.replace(wwwRegex, "")
if result.len > length:
result = result[0 ..< length] & "…"
proc shortLink*(text: string; length=28): string =
result = text.replace(wwwRegex, "").shorten(length)
proc stripHtml*(text: string; shorten=false): string =
var html = parseHtml(text)
@@ -59,32 +55,28 @@ proc replaceUrls*(body: string; prefs: Prefs; absolute=""): string =
result = body
if prefs.replaceYouTube.len > 0 and "youtu" in result:
let youtubeHost = strip(prefs.replaceYouTube, chars={'/'})
result = result.replace(ytRegex, youtubeHost)
result = result.replace(ytRegex, prefs.replaceYouTube)
if prefs.replaceYouTube in result:
result = result.replace("/c/", "/")
if prefs.replaceTwitter.len > 0:
let twitterHost = strip(prefs.replaceTwitter, chars={'/'})
if tco in result:
result = result.replace(tco, https & twitterHost & "/t.co")
if "x.com" in result:
result = result.replace(xRegex, twitterHost)
result = result.replacef(xLinkRegex, a(
twitterHost & "$2", href = https & twitterHost & "$1"))
if "twitter.com" in result:
result = result.replace(cards, twitterHost & "/cards")
result = result.replace(twRegex, twitterHost)
result = result.replacef(twLinkRegex, a(
twitterHost & "$2", href = https & twitterHost & "$1"))
if prefs.replaceTwitter.len > 0 and ("twitter.com" in body or tco in body):
result = result.replace(tco, https & prefs.replaceTwitter & "/t.co")
result = result.replace(cards, prefs.replaceTwitter & "/cards")
result = result.replace(twRegex, prefs.replaceTwitter)
result = result.replacef(twLinkRegex, a(
prefs.replaceTwitter & "$2", href = https & prefs.replaceTwitter & "$1"))
if prefs.replaceReddit.len > 0 and ("reddit.com" in result or "redd.it" in result):
let redditHost = strip(prefs.replaceReddit, chars={'/'})
result = result.replace(rdShortRegex, redditHost & "/comments/")
result = result.replace(rdRegex, redditHost)
if redditHost in result and "/gallery/" in result:
result = result.replace(rdShortRegex, prefs.replaceReddit & "/comments/")
result = result.replace(rdRegex, prefs.replaceReddit)
if prefs.replaceReddit in result and "/gallery/" in result:
result = result.replace("/gallery/", "/comments/")
if prefs.replaceInstagram.len > 0 and "instagram.com" in result:
result = result.replace(igRegex, prefs.replaceInstagram)
if absolute.len > 0 and "href" in result:
result = result.replace("href=\"/", &"href=\"{absolute}/")
result = result.replace("href=\"/", "href=\"" & absolute & "/")
proc getM3u8Url*(content: string): string =
var matches: array[1, string]
@@ -96,8 +88,6 @@ proc proxifyVideo*(manifest: string; proxy: bool): string =
for line in manifest.splitLines:
let url =
if line.startsWith("#EXT-X-MAP:URI"): line[16 .. ^2]
elif line.startsWith("#EXT-X-MEDIA") and "URI=" in line:
line[line.find("URI=") + 5 .. -1 + line.find("\"", start= 5 + line.find("URI="))]
else: line
if url.startsWith('/'):
let path = "https://video.twimg.com" & url
@@ -154,17 +144,6 @@ proc getShortTime*(tweet: Tweet): string =
else:
result = "now"
proc getDuration*(video: Video): string =
let
ms = video.durationMs
sec = int(round(ms / 1000))
min = floorDiv(sec, 60)
hour = floorDiv(min, 60)
if hour > 0:
return &"{hour}:{min mod 60}:{sec mod 60:02}"
else:
return &"{min mod 60}:{sec mod 60:02}"
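
A short worked example of the duration formatting above, with invented values and assuming a Video can be constructed with only durationMs set:

#   83_000 ms    -> sec = 83,   min = 1,  hour = 0 -> "1:23"
#   3_723_000 ms -> sec = 3723, min = 62, hour = 1 -> "1:2:03" (minutes not zero-padded)
let clip = Video(durationMs: 83_000)
assert clip.getDuration == "1:23"
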
proc getLink*(tweet: Tweet; focus=true): string =
if tweet.id == 0: return
var username = tweet.user.username


@@ -39,11 +39,8 @@ template use*(pool: HttpPool; heads: HttpHeaders; body: untyped): untyped =
try:
body
except BadClientError, ProtocolError:
# Twitter returned 503 or closed the connection, we need a new client
pool.release(c, true)
badClient = false
c = pool.acquire(heads)
except ProtocolError:
# Twitter closed the connection, retry
body
finally:
pool.release(c, badClient)


@@ -6,7 +6,7 @@ from os import getEnv
import jester
import types, config, prefs, formatters, redis_cache, http_pool, auth, apiutils
import types, config, prefs, formatters, redis_cache, http_pool, tokens
import views/[general, about]
import routes/[
preferences, timeline, status, media, search, rss, list, debug,
@@ -15,13 +15,8 @@ import routes/[
const instancesUrl = "https://github.com/zedeus/nitter/wiki/Instances"
const issuesUrl = "https://github.com/zedeus/nitter/issues"
let
configPath = getEnv("NITTER_CONF_FILE", "./nitter.conf")
(cfg, fullCfg) = getConfig(configPath)
sessionsPath = getEnv("NITTER_SESSIONS_FILE", "./sessions.jsonl")
initSessionPool(cfg, sessionsPath)
let configPath = getEnv("NITTER_CONF_FILE", "./nitter.conf")
let (cfg, fullCfg) = getConfig(configPath)
if not cfg.enableDebug:
# Silence Jester's query warning
@@ -37,14 +32,14 @@ setHmacKey(cfg.hmacKey)
setProxyEncoding(cfg.base64Media)
setMaxHttpConns(cfg.httpMaxConns)
setHttpProxy(cfg.proxy, cfg.proxyAuth)
setApiProxy(cfg.apiProxy)
setDisableTid(cfg.disableTid)
initAboutPage(cfg.staticDir)
waitFor initRedisPool(cfg)
stdout.write &"Connected to Redis at {cfg.redisHost}:{cfg.redisPort}\n"
stdout.flushFile
asyncCheck initTokenPool(cfg)
createUnsupportedRouter(cfg)
createResolverRouter(cfg)
createPrefRouter(cfg)
@@ -61,7 +56,6 @@ settings:
port = Port(cfg.port)
staticDir = cfg.staticDir
bindAddr = cfg.address
reusePort = true
routes:
get "/":
@@ -90,28 +84,20 @@ routes:
resp Http500, showError(
&"An error occurred, please {link} with the URL you tried to visit.", cfg)
error BadClientError:
echo error.exc.name, ": ", error.exc.msg
resp Http500, showError("Network error occurred, please try again.", cfg)
error RateLimitError:
echo error.exc.name, ": ", error.exc.msg
const link = a("another instance", href = instancesUrl)
resp Http429, showError(
&"Instance has been rate limited.<br>Use {link} or try again later.", cfg)
error NoSessionsError:
const link = a("another instance", href = instancesUrl)
resp Http429, showError(
&"Instance has no auth tokens, or is fully rate limited.<br>Use {link} or try again later.", cfg)
extend rss, ""
extend status, ""
extend search, ""
extend timeline, ""
extend media, ""
extend list, ""
extend unsupported, ""
extend preferences, ""
extend resolver, ""
extend rss, ""
extend search, ""
extend timeline, ""
extend list, ""
extend status, ""
extend media, ""
extend embed, ""
extend debug, ""
extend unsupported, ""


@@ -1,11 +1,9 @@
# SPDX-License-Identifier: AGPL-3.0-only
import strutils, options, times, math, tables
import strutils, options, tables, times, math
import packedjson, packedjson/deserialiser
import types, parserutils, utils
import experimental/parser/unifiedcard
proc parseGraphTweet(js: JsonNode): Tweet
proc parseUser(js: JsonNode; id=""): User =
if js.isNull: return
result = User(
@@ -21,46 +19,13 @@ proc parseUser(js: JsonNode; id=""): User =
tweets: js{"statuses_count"}.getInt,
likes: js{"favourites_count"}.getInt,
media: js{"media_count"}.getInt,
protected: js{"protected"}.getBool(js{"privacy", "protected"}.getBool),
verified: js{"verified"}.getBool,
protected: js{"protected"}.getBool,
joinDate: js{"created_at"}.getTime
)
if js{"is_blue_verified"}.getBool(false):
result.verifiedType = blue
with verifiedType, js{"verified_type"}:
result.verifiedType = parseEnum[VerifiedType](verifiedType.getStr)
result.expandUserEntities(js)
proc parseGraphUser(js: JsonNode): User =
var user = js{"user_result", "result"}
if user.isNull:
user = ? js{"user_results", "result"}
if user.isNull:
if js{"core"}.notNull and js{"legacy"}.notNull:
user = js
else:
return
result = parseUser(user{"legacy"}, user{"rest_id"}.getStr)
if result.verifiedType == none and user{"is_blue_verified"}.getBool(false):
result.verifiedType = blue
# fallback to support UserMedia/recent GraphQL updates
if result.username.len == 0:
result.username = user{"core", "screen_name"}.getStr
result.fullname = user{"core", "name"}.getStr
result.userPic = user{"avatar", "image_url"}.getImageStr.replace("_normal", "")
if user{"is_blue_verified"}.getBool(false):
result.verifiedType = blue
with verifiedType, user{"verification", "verified_type"}:
result.verifiedType = parseEnum[VerifiedType](verifiedType.getStr)
proc parseGraphList*(js: JsonNode): List =
if js.isNull: return
@@ -73,13 +38,14 @@ proc parseGraphList*(js: JsonNode): List =
result = List(
id: list{"id_str"}.getStr,
name: list{"name"}.getStr,
username: list{"user_results", "result", "legacy", "screen_name"}.getStr,
userId: list{"user_results", "result", "rest_id"}.getStr,
username: list{"user", "legacy", "screen_name"}.getStr,
userId: list{"user", "rest_id"}.getStr,
description: list{"description"}.getStr,
members: list{"member_count"}.getInt,
banner: list{"custom_banner_media", "media_info", "original_img_url"}.getImageStr
banner: list{"custom_banner_media", "media_info", "url"}.getImageStr
)
proc parsePoll(js: JsonNode): Poll =
let vals = js{"binding_values"}
# name format is pollNchoice_*
@@ -98,96 +64,34 @@ proc parsePoll(js: JsonNode): Poll =
result.leader = result.values.find(max(result.values))
result.votes = result.values.sum
proc parseVideoVariants(variants: JsonNode): seq[VideoVariant] =
result = @[]
for v in variants:
let
url = v{"url"}.getStr
contentType = parseEnum[VideoType](v{"content_type"}.getStr("video/mp4"))
bitrate = v{"bit_rate"}.getInt(v{"bitrate"}.getInt(0))
result.add VideoVariant(
contentType: contentType,
bitrate: bitrate,
url: url,
resolution: if contentType == mp4: getMp4Resolution(url) else: 0
)
proc parseGif(js: JsonNode): Gif =
result = Gif(
url: js{"video_info", "variants"}[0]{"url"}.getImageStr,
thumb: js{"media_url_https"}.getImageStr
)
proc parseVideo(js: JsonNode): Video =
result = Video(
thumb: js{"media_url_https"}.getImageStr,
available: true,
views: js{"ext", "mediaStats", "r", "ok", "viewCount"}.getStr,
available: js{"ext_media_availability", "status"}.getStr == "available",
title: js{"ext_alt_text"}.getStr,
durationMs: js{"video_info", "duration_millis"}.getInt
# playbackType: mp4
)
with status, js{"ext_media_availability", "status"}:
if status.getStr.len > 0 and status.getStr.toLowerAscii != "available":
result.available = false
with title, js{"additional_media_info", "title"}:
result.title = title.getStr
with description, js{"additional_media_info", "description"}:
result.description = description.getStr
result.variants = parseVideoVariants(js{"video_info", "variants"})
proc parseLegacyMediaEntities(js: JsonNode; result: var Tweet) =
with jsMedia, js{"extended_entities", "media"}:
for m in jsMedia:
case m.getTypeName:
of "photo":
result.photos.add m{"media_url_https"}.getImageStr
of "video":
result.video = some(parseVideo(m))
with user, m{"additional_media_info", "source_user"}:
if user{"id"}.getInt > 0:
result.attribution = some(parseUser(user))
else:
result.attribution = some(parseGraphUser(user))
of "animated_gif":
result.gif = some Gif(
url: m{"video_info", "variants"}[0]{"url"}.getImageStr,
thumb: m{"media_url_https"}.getImageStr
)
else: discard
with url, m{"url"}:
if result.text.endsWith(url.getStr):
result.text.removeSuffix(url.getStr)
result.text = result.text.strip()
proc parseMediaEntities(js: JsonNode; result: var Tweet) =
with mediaEntities, js{"media_entities"}:
for mediaEntity in mediaEntities:
with mediaInfo, mediaEntity{"media_results", "result", "media_info"}:
case mediaInfo.getTypeName
of "ApiImage":
result.photos.add mediaInfo{"original_img_url"}.getImageStr
of "ApiVideo":
let status = mediaEntity{"media_results", "result", "media_availability_v2", "status"}
result.video = some Video(
available: status.getStr == "Available",
thumb: mediaInfo{"preview_image", "original_img_url"}.getImageStr,
durationMs: mediaInfo{"duration_millis"}.getInt,
variants: parseVideoVariants(mediaInfo{"variants"})
)
of "ApiGif":
result.gif = some Gif(
url: mediaInfo{"variants"}[0]{"url"}.getImageStr,
thumb: mediaInfo{"preview_image", "original_img_url"}.getImageStr
)
else: discard
# Remove media URLs from text
with mediaList, js{"legacy", "entities", "media"}:
for url in mediaList:
let expandedUrl = url.getExpandedUrl
if result.text.endsWith(expandedUrl):
result.text.removeSuffix(expandedUrl)
result.text = result.text.strip()
for v in js{"video_info", "variants"}:
result.variants.add VideoVariant(
contentType: parseEnum[VideoType](v{"content_type"}.getStr("summary")),
bitrate: v{"bitrate"}.getInt,
url: v{"url"}.getStr
)
proc parsePromoVideo(js: JsonNode): Video =
result = Video(
@@ -267,7 +171,7 @@ proc parseCard(js: JsonNode; urls: JsonNode): Card =
for u in ? urls:
if u{"url"}.getStr == result.url:
result.url = u.getExpandedUrl(result.url)
result.url = u{"expanded_url"}.getStr
break
if kind in {videoDirectMessage, imageDirectMessage}:
@@ -277,19 +181,14 @@ proc parseCard(js: JsonNode; urls: JsonNode): Card =
result.url.len == 0 or result.url.startsWith("card://"):
result.url = getPicUrl(result.image)
proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
proc parseTweet(js: JsonNode): Tweet =
if js.isNull: return
let time =
if js{"created_at"}.notNull: js{"created_at"}.getTime
else: js{"created_at_ms"}.getTimeFromMs
result = Tweet(
id: js{"id_str"}.getId,
threadId: js{"conversation_id_str"}.getId,
replyId: js{"in_reply_to_status_id_str"}.getId,
text: js{"full_text"}.getStr,
time: time,
time: js{"created_at"}.getTime,
hasThread: js{"self_thread"}.notNull,
available: true,
user: User(id: js{"user_id_str"}.getStr),
@@ -297,38 +196,20 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
replies: js{"reply_count"}.getInt,
retweets: js{"retweet_count"}.getInt,
likes: js{"favorite_count"}.getInt,
views: js{"views_count"}.getInt
quotes: js{"quote_count"}.getInt
)
)
# fix for pinned threads
if result.hasThread and result.threadId == 0:
result.threadId = js{"self_thread", "id_str"}.getId
result.expandTweetEntities(js)
if "retweeted_status" in js:
result.retweet = some Tweet()
elif js{"is_quote_status"}.getBool:
if js{"is_quote_status"}.getBool:
result.quote = some Tweet(id: js{"quoted_status_id_str"}.getId)
# legacy
with rt, js{"retweeted_status_id_str"}:
result.retweet = some Tweet(id: rt.getId)
return
# graphql
with rt, js{"retweeted_status_result", "result"}:
# needed due to weird edgecase where the actual tweet data isn't included
if "legacy" in rt:
result.retweet = some parseGraphTweet(rt)
return
with reposts, js{"repostedStatusResults"}:
with rt, reposts{"result"}:
if "legacy" in rt:
result.retweet = some parseGraphTweet(rt)
return
if jsCard.kind != JNull:
with jsCard, js{"card"}:
let name = jsCard{"name"}.getStr
if "poll" in name:
if "image" in name:
@@ -340,8 +221,18 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
else:
result.card = some parseCard(jsCard, js{"entities", "urls"})
result.expandTweetEntities(js)
parseLegacyMediaEntities(js, result)
with jsMedia, js{"extended_entities", "media"}:
for m in jsMedia:
case m{"type"}.getStr
of "photo":
result.photos.add m{"media_url_https"}.getImageStr
of "video":
result.video = some(parseVideo(m))
with user, m{"additional_media_info", "source_user"}:
result.attribution = some(parseUser(user))
of "animated_gif":
result.gif = some(parseGif(m))
else: discard
with jsWithheld, js{"withheld_in_countries"}:
let withheldInCountries: seq[string] =
@@ -357,265 +248,159 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
result.text.removeSuffix(" Learn more.")
result.available = false
proc parseGraphTweet(js: JsonNode): Tweet =
if js.kind == JNull:
return Tweet()
case js.getTypeName:
of "TweetUnavailable":
return Tweet()
of "TweetTombstone":
with text, select(js{"tombstone", "richText"}, js{"tombstone", "text"}):
return Tweet(text: text.getTombstone)
return Tweet()
of "TweetPreviewDisplay":
return Tweet(text: "You're unable to view this Tweet because it's only available to the Subscribers of the account owner.")
of "TweetWithVisibilityResults":
return parseGraphTweet(js{"tweet"})
else:
discard
if not js.hasKey("legacy"):
return Tweet()
var jsCard = select(js{"card"}, js{"tweet_card"}, js{"legacy", "tweet_card"})
if jsCard.kind != JNull:
let legacyCard = jsCard{"legacy"}
if legacyCard.kind != JNull:
let bindingArray = legacyCard{"binding_values"}
if bindingArray.kind == JArray:
var bindingObj: seq[(string, JsonNode)]
for item in bindingArray:
bindingObj.add((item{"key"}.getStr, item{"value"}))
# Create a new card object with flattened structure
jsCard = %*{
"name": legacyCard{"name"},
"url": legacyCard{"url"},
"binding_values": %bindingObj
}
result = parseTweet(js{"legacy"}, jsCard)
result.id = js{"rest_id"}.getId
result.user = parseGraphUser(js{"core"})
if result.replyId == 0:
result.replyId = js{"reply_to_results", "rest_id"}.getId
with count, js{"views", "count"}:
result.stats.views = count.getStr("0").parseInt
with noteTweet, js{"note_tweet", "note_tweet_results", "result"}:
result.expandNoteTweetEntities(noteTweet)
parseMediaEntities(js, result)
proc finalizeTweet(global: GlobalObjects; id: string): Tweet =
let intId = if id.len > 0: parseBiggestInt(id) else: 0
result = global.tweets.getOrDefault(id, Tweet(id: intId))
if result.quote.isSome:
result.quote = some(parseGraphTweet(js{"quoted_status_result", "result"}))
let quote = get(result.quote).id
if $quote in global.tweets:
result.quote = some global.tweets[$quote]
else:
result.quote = some Tweet()
with quoted, js{"quotedPostResults", "result"}:
result.quote = some(parseGraphTweet(quoted))
if result.retweet.isSome:
let rt = get(result.retweet).id
if $rt in global.tweets:
result.retweet = some finalizeTweet(global, $rt)
else:
result.retweet = some Tweet()
proc parseGraphThread(js: JsonNode): tuple[thread: Chain; self: bool] =
for t in ? js{"content", "items"}:
let entryId = t.getEntryId
if "cursor-showmore" in entryId:
let cursor = t{"item", "content", "value"}
result.thread.cursor = cursor.getStr
result.thread.hasMore = true
elif "tweet" in entryId and "promoted" notin entryId:
with tweet, t.getTweetResult("item"):
result.thread.content.add parseGraphTweet(tweet)
proc parsePin(js: JsonNode; global: GlobalObjects): Tweet =
let pin = js{"pinEntry", "entry", "entryId"}.getStr
if pin.len == 0: return
let tweetDisplayType = select(
t{"item", "content", "tweet_display_type"},
t{"item", "itemContent", "tweetDisplayType"}
)
if tweetDisplayType.getStr == "SelfThread":
result.self = true
let id = pin.getId
if id notin global.tweets: return
proc parseGraphTweetResult*(js: JsonNode): Tweet =
with tweet, js{"data", "tweet_result", "result"}:
result = parseGraphTweet(tweet)
global.tweets[id].pinned = true
return finalizeTweet(global, id)
proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
result = Conversation(replies: Result[Chain](beginning: true))
proc parseGlobalObjects(js: JsonNode): GlobalObjects =
result = GlobalObjects()
let
tweets = ? js{"globalObjects", "tweets"}
users = ? js{"globalObjects", "users"}
let instructions = ? select(
js{"data", "timelineResponse", "instructions"},
js{"data", "timeline_response", "instructions"},
js{"data", "threaded_conversation_with_injections_v2", "instructions"}
)
if instructions.len == 0:
return
for k, v in users:
result.users[k] = parseUser(v, k)
for i in instructions:
if i.getTypeName == "TimelineAddEntries":
for e in i{"entries"}:
let entryId = e.getEntryId
if entryId.startsWith("tweet"):
let tweetResult = getTweetResult(e)
if tweetResult.notNull:
let tweet = parseGraphTweet(tweetResult)
for k, v in tweets:
var tweet = parseTweet(v)
if tweet.user.id in result.users:
tweet.user = result.users[tweet.user.id]
result.tweets[k] = tweet
if not tweet.available:
tweet.id = entryId.getId
proc parseThread(js: JsonNode; global: GlobalObjects): tuple[thread: Chain, self: bool] =
result.thread = Chain()
if $tweet.id == tweetId:
result.tweet = tweet
else:
result.before.content.add tweet
elif entryId.startsWith("conversationthread"):
let (thread, self) = parseGraphThread(e)
if self:
result.after = thread
elif thread.content.len > 0:
result.replies.content.add thread
elif entryId.startsWith("tombstone"):
let
content = select(e{"content", "content"}, e{"content", "itemContent"})
tweet = Tweet(
id: entryId.getId,
available: false,
text: content{"tombstoneInfo", "richText"}.getTombstone
)
let thread = js{"content", "item", "content", "conversationThread"}
with cursor, thread{"showMoreCursor"}:
result.thread.cursor = cursor{"value"}.getStr
result.thread.hasMore = true
if $tweet.id == tweetId:
result.tweet = tweet
else:
result.before.content.add tweet
elif entryId.startsWith("cursor-bottom"):
var cursorValue = select(
e{"content", "value"},
e{"content", "content", "value"},
e{"content", "itemContent", "value"}
)
result.replies.bottom = cursorValue.getStr
for t in thread{"conversationComponents"}:
let content = t{"conversationTweetComponent", "tweet"}
proc extractTweetsFromEntry*(e: JsonNode): seq[Tweet] =
with tweetResult, getTweetResult(e):
var tweet = parseGraphTweet(tweetResult)
if content{"displayType"}.getStr == "SelfThread":
result.self = true
var tweet = finalizeTweet(global, content{"id"}.getStr)
if not tweet.available:
tweet.id = e.getEntryId.getId
result.add tweet
return
tweet.tombstone = getTombstone(content{"tombstone"})
result.thread.content.add tweet
for item in e{"content", "items"}:
with tweetResult, item.getTweetResult("item"):
var tweet = parseGraphTweet(tweetResult)
if not tweet.available:
tweet.id = item.getEntryId.getId
result.add tweet
proc parseConversation*(js: JsonNode; tweetId: string): Conversation =
result = Conversation(replies: Result[Chain](beginning: true))
let global = parseGlobalObjects(? js)
proc parseGraphTimeline*(js: JsonNode; after=""): Profile =
result = Profile(tweets: Timeline(beginning: after.len == 0))
let instructions = ? select(
js{"data", "list", "timeline_response", "timeline", "instructions"},
js{"data", "user", "result", "timeline", "timeline", "instructions"},
js{"data", "user_result", "result", "timeline_response", "timeline", "instructions"}
)
let instructions = ? js{"timeline", "instructions"}
if instructions.len == 0:
return
for i in instructions:
if i{"moduleItems"}.notNull:
for item in i{"moduleItems"}:
with tweetResult, item.getTweetResult("item"):
let tweet = parseGraphTweet(tweetResult)
if not tweet.available:
tweet.id = item.getEntryId.getId
result.tweets.content.add tweet
continue
for e in instructions[0]{"addEntries", "entries"}:
let entry = e{"entryId"}.getStr
if "tweet" in entry or "tombstone" in entry:
let tweet = finalizeTweet(global, e.getEntryId)
if $tweet.id != tweetId:
result.before.content.add tweet
else:
result.tweet = tweet
elif "conversationThread" in entry:
let (thread, self) = parseThread(e, global)
if thread.content.len > 0:
if self:
result.after = thread
else:
result.replies.content.add thread
elif "cursor-showMore" in entry:
result.replies.bottom = e.getCursor
elif "cursor-bottom" in entry:
result.replies.bottom = e.getCursor
if i{"entries"}.notNull:
for e in i{"entries"}:
let entryId = e.getEntryId
if entryId.startsWith("tweet") or entryId.startsWith("profile-grid"):
for tweet in extractTweetsFromEntry(e):
result.tweets.content.add tweet
elif "-conversation-" in entryId or entryId.startsWith("homeConversation"):
let (thread, self) = parseGraphThread(e)
result.tweets.content.add thread.content
elif entryId.startsWith("cursor-bottom"):
result.tweets.bottom = e{"content", "value"}.getStr
proc parseStatus*(js: JsonNode): Tweet =
with e, js{"errors"}:
if e.getError == tweetNotFound:
return
if after.len == 0:
if i.getTypeName == "TimelinePinEntry":
let tweets = extractTweetsFromEntry(i{"entry"})
if tweets.len > 0:
var tweet = tweets[0]
tweet.pinned = true
result.pinned = some tweet
result = parseTweet(js)
if not result.isNil:
result.user = parseUser(js{"user"})
proc parseGraphPhotoRail*(js: JsonNode): PhotoRail =
result = @[]
with quote, js{"quoted_status"}:
result.quote = some parseStatus(js{"quoted_status"})
let instructions = select(
js{"data", "user", "result", "timeline", "timeline", "instructions"},
js{"data", "user_result", "result", "timeline_response", "timeline", "instructions"}
)
if instructions.len == 0:
proc parseInstructions[T](res: var Result[T]; global: GlobalObjects; js: JsonNode) =
if js.kind != JArray or js.len == 0:
return
for i in instructions:
if i{"moduleItems"}.notNull:
for item in i{"moduleItems"}:
with tweetResult, item.getTweetResult("item"):
let t = parseGraphTweet(tweetResult)
if not t.available:
t.id = item.getEntryId.getId
for i in js:
when T is Tweet:
if res.beginning and i{"pinEntry"}.notNull:
with pin, parsePin(i, global):
res.content.add pin
let photo = extractGalleryPhoto(t)
if photo.url.len > 0:
result.add photo
with r, i{"replaceEntry", "entry"}:
if "top" in r{"entryId"}.getStr:
res.top = r.getCursor
elif "bottom" in r{"entryId"}.getStr:
res.bottom = r.getCursor
if result.len == 16:
return
continue
proc parseTimeline*(js: JsonNode; after=""): Timeline =
result = Timeline(beginning: after.len == 0)
let global = parseGlobalObjects(? js)
if i.getTypeName != "TimelineAddEntries":
continue
let instructions = ? js{"timeline", "instructions"}
if instructions.len == 0: return
for e in i{"entries"}:
let entryId = e.getEntryId
if entryId.startsWith("tweet") or entryId.startsWith("profile-grid"):
for t in extractTweetsFromEntry(e):
let photo = extractGalleryPhoto(t)
if photo.url.len > 0:
result.add photo
result.parseInstructions(global, instructions)
if result.len == 16:
return
for e in instructions[0]{"addEntries", "entries"}:
let entry = e{"entryId"}.getStr
if "tweet" in entry or entry.startsWith("sq-I-t") or "tombstone" in entry:
let tweet = finalizeTweet(global, e.getEntryId)
if not tweet.available: continue
result.content.add tweet
elif "cursor-top" in entry:
result.top = e.getCursor
elif "cursor-bottom" in entry:
result.bottom = e.getCursor
elif entry.startsWith("sq-C"):
with cursor, e{"content", "operation", "cursor"}:
if cursor{"cursorType"}.getStr == "Bottom":
result.bottom = cursor{"value"}.getStr
else:
result.top = cursor{"value"}.getStr
proc parseGraphSearch*[T: User | Tweets](js: JsonNode; after=""): Result[T] =
result = Result[T](beginning: after.len == 0)
proc parsePhotoRail*(js: JsonNode): PhotoRail =
for tweet in js:
let
t = parseTweet(tweet)
url = if t.photos.len > 0: t.photos[0]
elif t.video.isSome: get(t.video).thumb
elif t.gif.isSome: get(t.gif).thumb
elif t.card.isSome: get(t.card).image
else: ""
let instructions = select(
js{"data", "search", "timeline_response", "timeline", "instructions"},
js{"data", "search_by_raw_query", "search_timeline", "timeline", "instructions"}
)
if instructions.len == 0:
return
for instruction in instructions:
let typ = getTypeName(instruction)
if typ == "TimelineAddEntries":
for e in instruction{"entries"}:
let entryId = e.getEntryId
when T is Tweets:
if entryId.startsWith("tweet"):
with tweetRes, getTweetResult(e):
let tweet = parseGraphTweet(tweetRes)
if not tweet.available:
tweet.id = entryId.getId
result.content.add tweet
elif T is User:
if entryId.startsWith("user"):
with userRes, e{"content", "itemContent"}:
result.content.add parseGraphUser(userRes)
if entryId.startsWith("cursor-bottom"):
result.bottom = e{"content", "value"}.getStr
elif typ == "TimelineReplaceEntry":
if instruction{"entry_id_to_replace"}.getStr.startsWith("cursor-bottom"):
result.bottom = instruction{"entry", "content", "value"}.getStr
if url.len == 0: continue
result.add GalleryPhoto(url: url, tweetId: $t.id)

@@ -1,17 +1,9 @@
# SPDX-License-Identifier: AGPL-3.0-only
import std/[times, macros, htmlgen, options, algorithm, re]
import std/strutils except escape
import std/[strutils, times, macros, htmlgen, options, algorithm, re]
import std/unicode except strip
from xmltree import escape
import packedjson
import types, utils, formatters
const
unicodeOpen = "\uFFFA"
unicodeClose = "\uFFFB"
xmlOpen = escape("<")
xmlClose = escape(">")
let
unRegex = re"(^|[^A-z0-9-_./?])@([A-z0-9_]{1,15})"
unReplace = "$1<a href=\"/$2\">@$2</a>"
@@ -36,19 +28,13 @@ template `?`*(js: JsonNode): untyped =
if j.isNull: return
j
template select*(a, b: JsonNode): untyped =
if a.notNull: a else: b
template select*(a, b, c: JsonNode): untyped =
if a.notNull: a elif b.notNull: b else: c
template with*(ident, value, body): untyped =
if true:
template `with`*(ident, value, body): untyped =
block:
let ident {.inject.} = value
if ident != nil: body
template with*(ident; value: JsonNode; body): untyped =
if true:
template `with`*(ident; value: JsonNode; body): untyped =
block:
let ident {.inject.} = value
if value.notNull: body
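For illustration (not part of the diff), a minimal sketch of how the select/with helpers above are typically used, assuming packedjson and this module's helpers (notNull etc.) are in scope; the JSON shape is invented:
import packedjson
let js = parseJson("""{"legacy": {"full_text": "hello"}}""")
# select yields the first non-null node
let textNode = select(js{"note_tweet", "text"}, js{"legacy", "full_text"})
# with binds the node and only runs the body when the node is not null
with t, js{"legacy", "full_text"}:
  echo t.getStr  # prints "hello"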
@@ -59,20 +45,6 @@ template getError*(js: JsonNode): Error =
if js.kind != JArray or js.len == 0: null
else: Error(js[0]{"code"}.getInt)
proc getTweetResult*(js: JsonNode; root="content"): JsonNode =
select(
js{root, "content", "tweet_results", "result"},
js{root, "itemContent", "tweet_results", "result"},
js{root, "content", "tweetResult", "result"}
)
template getTypeName*(js: JsonNode): string =
js{"__typename"}.getStr(js{"type"}.getStr)
template getEntryId*(e: JsonNode): string =
e{"entryId"}.getStr(e{"entry_id"}.getStr)
template parseTime(time: string; f: static string; flen: int): DateTime =
if time.len != flen: return
parse(time, f, utc())
@@ -83,24 +55,29 @@ proc getDateTime*(js: JsonNode): DateTime =
proc getTime*(js: JsonNode): DateTime =
parseTime(js.getStr, "ddd MMM dd hh:mm:ss \'+0000\' yyyy", 30)
proc getTimeFromMs*(js: JsonNode): DateTime =
let ms = js.getInt(0)
if ms == 0: return
let seconds = ms div 1000
return fromUnix(seconds).utc()
proc getId*(id: string): int64 {.inline.} =
proc getId*(id: string): string {.inline.} =
let start = id.rfind("-")
if start < 0:
return parseBiggestInt(id)
return parseBiggestInt(id[start + 1 ..< id.len])
if start < 0: return id
id[start + 1 ..< id.len]
proc getId*(js: JsonNode): int64 {.inline.} =
case js.kind
of JString: return js.getStr("0").getId
of JString: return parseBiggestInt(js.getStr("0"))
of JInt: return js.getBiggestInt()
else: return 0
proc getEntryId*(js: JsonNode): string {.inline.} =
let entry = js{"entryId"}.getStr
if entry.len == 0: return
if "tweet" in entry or "sq-I-t" in entry:
return entry.getId
elif "tombstone" in entry:
return js{"content", "item", "content", "tombstone", "tweet", "id"}.getStr
else:
echo "unknown entry: ", entry
return
template getStrVal*(js: JsonNode; default=""): string =
js{"string_value"}.getStr(default)
@@ -112,9 +89,6 @@ proc getImageStr*(js: JsonNode): string =
template getImageVal*(js: JsonNode): string =
js{"image_value", "url"}.getImageStr
template getExpandedUrl*(js: JsonNode; fallback=""): string =
js{"expanded_url"}.getStr(js{"url"}.getStr(fallback))
proc getCardUrl*(js: JsonNode; kind: CardKind): string =
result = js{"website_url"}.getStrVal
if kind == promoVideoConvo:
@@ -156,31 +130,16 @@ proc getBanner*(js: JsonNode): string =
return
proc getTombstone*(js: JsonNode): string =
result = js{"text"}.getStr
result = js{"tombstoneInfo", "richText", "text"}.getStr
result.removeSuffix(" Learn more")
proc getMp4Resolution*(url: string): int =
# parses the height out of a URL like this one:
# https://video.twimg.com/ext_tw_video/<tweet-id>/pu/vid/720x1280/<random>.mp4
const vidSep = "/vid/"
let
vidIdx = url.find(vidSep) + vidSep.len
resIdx = url.find('x', vidIdx) + 1
res = url[resIdx ..< url.find("/", resIdx)]
try:
return parseInt(res)
except ValueError:
# cannot determine resolution (e.g. m3u8/non-mp4 video)
return 0
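As a quick worked example (editorial addition, not in the diff), using the URL from the comment above:
# "/vid/" is followed by "720x1280" here; the proc keeps the part after the
# 'x', i.e. the height, so this returns 1280.
doAssert getMp4Resolution(
  "https://video.twimg.com/ext_tw_video/123/pu/vid/720x1280/abc.mp4") == 1280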
proc extractSlice(js: JsonNode): Slice[int] =
result = js["indices"][0].getInt ..< js["indices"][1].getInt
proc extractUrls(result: var seq[ReplaceSlice]; js: JsonNode;
textLen: int; hideTwitter = false) =
let
url = js.getExpandedUrl
url = js["expanded_url"].getStr
slice = js.extractSlice
if hideTwitter and slice.b.succ >= textLen and url.isTwitterUrl:
@@ -241,7 +200,7 @@ proc expandUserEntities*(user: var User; js: JsonNode) =
ent = ? js{"entities"}
with urls, ent{"url", "urls"}:
user.website = urls[0].getExpandedUrl
user.website = urls[0]{"expanded_url"}.getStr
var replacements = newSeq[ReplaceSlice]()
@@ -256,37 +215,47 @@ proc expandUserEntities*(user: var User; js: JsonNode) =
user.bio = user.bio.replacef(unRegex, unReplace)
.replacef(htRegex, htReplace)
proc expandTextEntities(tweet: Tweet; entities: JsonNode; text: string; textSlice: Slice[int];
replyTo=""; hasRedundantLink=false) =
let hasCard = tweet.card.isSome
proc expandTweetEntities*(tweet: Tweet; js: JsonNode) =
let
orig = tweet.text.toRunes
textRange = js{"display_text_range"}
textSlice = textRange{0}.getInt .. textRange{1}.getInt
hasQuote = js{"is_quote_status"}.getBool
hasCard = tweet.card.isSome
var replyTo = ""
if tweet.replyId != 0:
with reply, js{"in_reply_to_screen_name"}:
tweet.reply.add reply.getStr
replyTo = reply.getStr
let ent = ? js{"entities"}
var replacements = newSeq[ReplaceSlice]()
with urls, entities{"urls"}:
with urls, ent{"urls"}:
for u in urls:
let urlStr = u["url"].getStr
if urlStr.len == 0 or urlStr notin text:
if urlStr.len == 0 or urlStr notin tweet.text:
continue
replacements.extractUrls(u, textSlice.b, hideTwitter = hasRedundantLink)
replacements.extractUrls(u, textSlice.b, hideTwitter = hasQuote)
if hasCard and u{"url"}.getStr == get(tweet.card).url:
get(tweet.card).url = u.getExpandedUrl
get(tweet.card).url = u{"expanded_url"}.getStr
with media, entities{"media"}:
with media, ent{"media"}:
for m in media:
replacements.extractUrls(m, textSlice.b, hideTwitter = true)
if "hashtags" in entities:
for hashtag in entities["hashtags"]:
if "hashtags" in ent:
for hashtag in ent["hashtags"]:
replacements.extractHashtags(hashtag)
if "symbols" in entities:
for symbol in entities["symbols"]:
if "symbols" in ent:
for symbol in ent["symbols"]:
replacements.extractHashtags(symbol)
if "user_mentions" in entities:
for mention in entities["user_mentions"]:
if "user_mentions" in ent:
for mention in ent["user_mentions"]:
let
name = mention{"screen_name"}.getStr
slice = mention.extractSlice
@@ -303,40 +272,5 @@ proc expandTextEntities(tweet: Tweet; entities: JsonNode; text: string; textSlic
replacements.deduplicate
replacements.sort(cmp)
tweet.text = text.toRunes.replacedWith(replacements, textSlice).strip(leading=false)
proc expandTweetEntities*(tweet: Tweet; js: JsonNode) =
let
entities = ? js{"entities"}
textRange = js{"display_text_range"}
textSlice = textRange{0}.getInt .. textRange{1}.getInt
hasQuote = js{"is_quote_status"}.getBool
hasJobCard = tweet.card.isSome and get(tweet.card).kind == jobDetails
var replyTo = ""
if tweet.replyId != 0:
with reply, js{"in_reply_to_screen_name"}:
replyTo = reply.getStr
tweet.reply.add replyTo
tweet.expandTextEntities(entities, tweet.text, textSlice, replyTo, hasQuote or hasJobCard)
proc expandNoteTweetEntities*(tweet: Tweet; js: JsonNode) =
let
entities = ? js{"entity_set"}
text = js{"text"}.getStr.multiReplace(("<", unicodeOpen), (">", unicodeClose))
textSlice = 0..text.runeLen
tweet.expandTextEntities(entities, text, textSlice)
tweet.text = tweet.text.multiReplace((unicodeOpen, xmlOpen), (unicodeClose, xmlClose))
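A small sketch (not from the diff) of the placeholder round-trip this relies on, using the unicodeOpen/unicodeClose/xmlOpen/xmlClose constants defined at the top of this file; the single-rune sentinels presumably keep the entity indices aligned while the text is sliced as runes:
let raw = "1 < 2 && 3 > 2"
let tmp = raw.multiReplace(("<", unicodeOpen), (">", unicodeClose))
# ... expandTextEntities operates on `tmp` at this point ...
let final = tmp.multiReplace((unicodeOpen, xmlOpen), (unicodeClose, xmlClose))
doAssert final == "1 &lt; 2 && 3 &gt; 2"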
proc extractGalleryPhoto*(t: Tweet): GalleryPhoto =
let url =
if t.photos.len > 0: t.photos[0]
elif t.video.isSome: get(t.video).thumb
elif t.gif.isSome: get(t.gif).thumb
elif t.card.isSome: get(t.card).image
else: ""
result = GalleryPhoto(url: url, tweetId: $t.id)
tweet.text = orig.replacedWith(replacements, textSlice)
.strip(leading=false)

@@ -83,7 +83,7 @@ genPrefs:
"Enable mp4 video playback (only for gifs)"
hlsPlayback(checkbox, false):
"Enable HLS video streaming (requires JavaScript)"
"Enable hls video streaming (requires JavaScript)"
proxyVideos(checkbox, true):
"Proxy video streaming through the server (might be slow)"
@@ -107,6 +107,10 @@ genPrefs:
"Reddit -> Teddit/Libreddit"
placeholder: "Teddit hostname"
replaceInstagram(input, ""):
"Instagram -> Bibliogram"
placeholder: "Bibliogram hostname"
iterator allPrefs*(): Pref =
for k, v in prefList:
for pref in v:

@@ -6,9 +6,10 @@ import types
const
validFilters* = @[
"media", "images", "twimg", "videos",
"native_video", "consumer_video", "spaces",
"native_video", "consumer_video", "pro_video",
"links", "news", "quote", "mentions",
"replies", "retweets", "nativeretweets"
"replies", "retweets", "nativeretweets",
"verified", "safe"
]
emptyQuery* = "include:nativeretweets"
@@ -17,11 +18,6 @@ template `@`(param: string): untyped =
if param in pms: pms[param]
else: ""
proc validateNumber(value: string): string =
if value.anyIt(not it.isDigit):
return ""
return value
proc initQuery*(pms: Table[string, string]; name=""): Query =
result = Query(
kind: parseEnum[QueryKind](@"f", tweets),
@@ -30,7 +26,7 @@ proc initQuery*(pms: Table[string, string]; name=""): Query =
excludes: validFilters.filterIt("e-" & it in pms),
since: @"since",
until: @"until",
minLikes: validateNumber(@"min_faves")
near: @"near"
)
if name.len > 0:
@@ -64,7 +60,7 @@ proc genQueryParam*(query: Query): string =
param &= "OR "
if query.fromUser.len > 0 and query.kind in {posts, media}:
param &= "filter:self_threads OR -filter:replies "
param &= "filter:self_threads OR-filter:replies "
if "nativeretweets" notin query.excludes:
param &= "include:nativeretweets "
@@ -82,8 +78,8 @@ proc genQueryParam*(query: Query): string =
result &= " since:" & query.since
if query.until.len > 0:
result &= " until:" & query.until
if query.minLikes.len > 0:
result &= " min_faves:" & query.minLikes
if query.near.len > 0:
result &= &" near:\"{query.near}\" within:15mi"
if query.text.len > 0:
if result.len > 0:
result &= " " & query.text
@@ -97,18 +93,18 @@ proc genQueryUrl*(query: Query): string =
if query.text.len > 0:
params.add "q=" & encodeUrl(query.text)
for f in query.filters:
params.add &"f-{f}=on"
params.add "f-" & f & "=on"
for e in query.excludes:
params.add &"e-{e}=on"
params.add "e-" & e & "=on"
for i in query.includes.filterIt(it != "nativeretweets"):
params.add &"i-{i}=on"
params.add "i-" & i & "=on"
if query.since.len > 0:
params.add "since=" & query.since
if query.until.len > 0:
params.add "until=" & query.until
if query.minLikes.len > 0:
params.add "min_faves=" & query.minLikes
if query.near.len > 0:
params.add "near=" & query.near
if params.len > 0:
result &= params.join("&")
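For orientation (editorial sketch, not in the diff), roughly what the builder above emits; both sides of this diff format the individual parameters identically, and the Query fields are the ones from types.nim:
let q = Query(kind: tweets, text: "nim lang", filters: @["media"], since: "2022-01-01")
echo genQueryUrl(q)
# prints something like: q=nim+lang&f-media=on&since=2022-01-01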

@@ -52,7 +52,6 @@ proc initRedisPool*(cfg: Config) {.async.} =
await migrate("profileDates", "p:*")
await migrate("profileStats", "p:*")
await migrate("userType", "p:*")
await migrate("verifiedType", "p:*")
pool.withAcquire(r):
# optimize memory usage for user ID buckets
@@ -86,7 +85,7 @@ proc cache*(data: List) {.async.} =
await setEx(data.listKey, listCacheTime, compress(toFlatty(data)))
proc cache*(data: PhotoRail; name: string) {.async.} =
await setEx("pr2:" & toLower(name), baseCacheTime * 2, compress(toFlatty(data)))
await setEx("pr:" & toLower(name), baseCacheTime, compress(toFlatty(data)))
proc cache*(data: User) {.async.} =
if data.username.len == 0: return
@@ -119,11 +118,11 @@ proc getUserId*(username: string): Future[string] {.async.} =
pool.withAcquire(r):
result = await r.hGet(name.uidKey, name)
if result == redisNil:
let user = await getGraphUser(username)
let user = await getUser(username)
if user.suspended:
return "suspended"
else:
await all(cacheUserId(name, user.id), cache(user))
await cacheUserId(name, user.id)
return user.id
proc getCachedUser*(username: string; fetch=true): Future[User] {.async.} =
@@ -131,7 +130,8 @@ proc getCachedUser*(username: string; fetch=true): Future[User] {.async.} =
if prof != redisNil:
prof.deserialize(User)
elif fetch:
result = await getGraphUser(username)
let userId = await getUserId(username)
result = await getGraphUser(userId)
await cache(result)
proc getCachedUsername*(userId: string): Future[string] {.async.} =
@@ -142,30 +142,28 @@ proc getCachedUsername*(userId: string): Future[string] {.async.} =
if username != redisNil:
result = username
else:
let user = await getGraphUserById(userId)
let user = await getUserById(userId)
result = user.username
await setEx(key, baseCacheTime, result)
if result.len > 0 and user.id.len > 0:
await all(cacheUserId(result, user.id), cache(user))
# proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
# if id == 0: return
# let tweet = await get(id.tweetKey)
# if tweet != redisNil:
# tweet.deserialize(Tweet)
# else:
# result = await getGraphTweetResult($id)
# if not result.isNil:
# await cache(result)
proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
if id == 0: return
let tweet = await get(id.tweetKey)
if tweet != redisNil:
tweet.deserialize(Tweet)
else:
result = await getStatus($id)
if result.isNil:
await cache(result)
proc getCachedPhotoRail*(id: string): Future[PhotoRail] {.async.} =
if id.len == 0: return
let rail = await get("pr2:" & toLower(id))
proc getCachedPhotoRail*(name: string): Future[PhotoRail] {.async.} =
if name.len == 0: return
let rail = await get("pr:" & toLower(name))
if rail != redisNil:
rail.deserialize(PhotoRail)
else:
result = await getPhotoRail(id)
await cache(result, id)
result = await getPhotoRail(name)
await cache(result, name)
proc getCachedList*(username=""; slug=""; id=""): Future[List] {.async.} =
let list = if id.len == 0: redisNil

@@ -1,13 +1,10 @@
# SPDX-License-Identifier: AGPL-3.0-only
import jester
import router_utils
import ".."/[auth, types]
import ".."/[tokens, types]
proc createDebugRouter*(cfg: Config) =
router debug:
get "/.health":
respJson getSessionPoolHealth()
get "/.sessions":
get "/.tokens":
cond cfg.enableDebug
respJson getSessionPoolDebug()
respJson getPoolJson()

@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, strutils, strformat, options
import asyncdispatch, strutils, options
import jester, karax/vdom
import ".."/[types, api]
import ../views/[embed, tweet, general]
@@ -10,27 +10,27 @@ export api, embed, vdom, tweet, general, router_utils
proc createEmbedRouter*(cfg: Config) =
router embed:
get "/i/videos/tweet/@id":
let tweet = await getGraphTweetResult(@"id")
if tweet == nil or tweet.video.isNone:
let convo = await getTweet(@"id")
if convo == nil or convo.tweet == nil or convo.tweet.video.isNone:
resp Http404
resp renderVideoEmbed(tweet, cfg, request)
resp renderVideoEmbed(convo.tweet, cfg, request)
get "/@user/status/@id/embed":
let
tweet = await getGraphTweetResult(@"id")
convo = await getTweet(@"id")
prefs = cookiePrefs()
path = getPath()
if tweet == nil:
if convo == nil or convo.tweet == nil:
resp Http404
resp renderTweetEmbed(tweet, path, prefs, cfg, request)
resp $renderTweetEmbed(convo.tweet, path, prefs, cfg, request)
get "/embed/Tweet.html":
let id = @"id"
if id.len > 0:
redirect(&"/i/status/{id}/embed")
redirect("/i/status/" & id & "/embed")
else:
resp Http404

@@ -1,25 +1,23 @@
# SPDX-License-Identifier: AGPL-3.0-only
import strutils, strformat, uri
import strutils, uri
import jester
import router_utils
import ".."/[types, redis_cache, api]
import ../views/[general, timeline, list]
export getListTimeline, getGraphList
template respList*(list, timeline, title, vnode: typed) =
if list.id.len == 0 or list.name.len == 0:
resp Http404, showError(&"""List "{@"id"}" not found""", cfg)
resp Http404, showError("List " & @"id" & " not found", cfg)
let
html = renderList(vnode, timeline.query, list)
rss = &"""/i/lists/{@"id"}/rss"""
rss = "/i/lists/$1/rss" % [@"id"]
resp renderMain(html, request, cfg, prefs, titleText=title, rss=rss, banner=list.banner)
proc title*(list: List): string =
&"@{list.username}/{list.name}"
proc createListRouter*(cfg: Config) =
router list:
get "/@name/lists/@slug/?":
@@ -30,22 +28,24 @@ proc createListRouter*(cfg: Config) =
slug = decodeUrl(@"slug")
list = await getCachedList(@"name", slug)
if list.id.len == 0:
resp Http404, showError(&"""List "{@"slug"}" not found""", cfg)
redirect(&"/i/lists/{list.id}")
resp Http404, showError("List \"" & @"slug" & "\" not found", cfg)
redirect("/i/lists/" & list.id)
get "/i/lists/@id/?":
cond '.' notin @"id"
let
prefs = cookiePrefs()
list = await getCachedList(id=(@"id"))
timeline = await getGraphListTweets(list.id, getCursor())
title = "@" & list.username & "/" & list.name
timeline = await getListTimeline(list.id, getCursor())
vnode = renderTimelineTweets(timeline, prefs, request.path)
respList(list, timeline, list.title, vnode)
respList(list, timeline, title, vnode)
get "/i/lists/@id/members":
cond '.' notin @"id"
let
prefs = cookiePrefs()
list = await getCachedList(id=(@"id"))
title = "@" & list.username & "/" & list.name
members = await getGraphListMembers(list, getCursor())
respList(list, members, list.title, renderTimelineUsers(members, prefs, request.path))
respList(list, members, title, renderTimelineUsers(members, prefs, request.path))

@@ -37,8 +37,6 @@ proc proxyMedia*(req: jester.Request; url: string): Future[HttpCode] {.async.} =
try:
let res = await client.get(url)
if res.status != "200 OK":
if res.status != "404 Not Found":
echo "[media] Proxying failed, status: $1, url: $2" % [res.status, url]
return Http404
let hashed = $hash(url)
@@ -52,10 +50,10 @@ proc proxyMedia*(req: jester.Request; url: string): Future[HttpCode] {.async.} =
""
let headers = newHttpHeaders({
"content-type": res.headers["content-type", 0],
"content-length": contentLength,
"cache-control": maxAge,
"etag": hashed
"Content-Type": res.headers["content-type", 0],
"Content-Length": contentLength,
"Cache-Control": maxAge,
"ETag": hashed
})
respond(request, headers)
@@ -67,7 +65,6 @@ proc proxyMedia*(req: jester.Request; url: string): Future[HttpCode] {.async.} =
await request.client.send(data)
data.setLen 0
except HttpRequestError, ProtocolError, OSError:
echo "[media] Proxying exception, error: $1, url: $2" % [getCurrentExceptionMsg(), url]
result = Http404
finally:
client.close()
@@ -91,20 +88,6 @@ proc createMediaRouter*(cfg: Config) =
get "/pic/?":
resp Http404
get re"^\/pic\/orig\/(enc)?\/?(.+)":
var url = decoded(request, 1)
if "twimg.com" notin url:
url.insert(twimg)
if not url.startsWith(https):
url.insert(https)
url.add("?name=orig")
let uri = parseUri(url)
cond isTwitterUrl(uri) == true
let code = await proxyMedia(request, url)
check code
get re"^\/pic\/(enc)?\/?(.+)":
var url = decoded(request, 1)
if "twimg.com" notin url:
@@ -123,7 +106,7 @@ proc createMediaRouter*(cfg: Config) =
cond "http" in url
if getHmac(url) != request.matches[1]:
resp Http403, showError("Failed to verify signature", cfg)
resp showError("Failed to verify signature", cfg)
if ".mp4" in url or ".ts" in url or ".m4s" in url:
let code = await proxyMedia(request, url)

@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, tables, times, hashes, uri
import asyncdispatch, strutils, tables, times, hashes, uri
import jester
@@ -10,11 +10,6 @@ include "../views/rss.nimf"
export times, hashes
proc redisKey*(page, name, cursor: string): string =
result = page & ":" & name
if cursor.len > 0:
result &= ":" & cursor
proc timelineRss*(req: Request; cfg: Config; query: Query): Future[Rss] {.async.} =
var profile: Profile
let
@@ -23,16 +18,18 @@ proc timelineRss*(req: Request; cfg: Config; query: Query): Future[Rss] {.async.
names = getNames(name)
if names.len == 1:
profile = await fetchProfile(after, query, skipRail=true)
profile = await fetchProfile(after, query, skipRail=true, skipPinned=true)
else:
var q = query
q.fromUser = names
profile.tweets = await getGraphTweetSearch(q, after)
# this is kinda dumb
profile.user = User(
username: name,
fullname: names.join(" | "),
userpic: "https://abs.twimg.com/sticky/default_profile_images/default_profile.png"
profile = Profile(
tweets: await getSearch[Tweet](q, after),
# this is kinda dumb
user: User(
username: name,
fullname: names.join(" | "),
userpic: "https://abs.twimg.com/sticky/default_profile_images/default_profile.png"
)
)
if profile.user.suspended:
@@ -45,8 +42,8 @@ proc timelineRss*(req: Request; cfg: Config; query: Query): Future[Rss] {.async.
template respRss*(rss, page) =
if rss.cursor.len == 0:
let info = case page
of "User": " \"" & @"name" & "\" "
of "List": " \"" & @"id" & "\" "
of "User": " \"$1\" " % @"name"
of "List": " $1 " % @"id"
else: " "
resp Http404, showError(page & info & "not found", cfg)
@@ -70,13 +67,13 @@ proc createRssRouter*(cfg: Config) =
let
cursor = getCursor()
key = redisKey("search", $hash(genQueryUrl(query)), cursor)
key = "search:" & $hash(genQueryUrl(query)) & ":" & cursor
var rss = await getCachedRss(key)
if rss.cursor.len > 0:
respRss(rss, "Search")
let tweets = await getGraphTweetSearch(query, cursor)
let tweets = await getSearch[Tweet](query, cursor)
rss.cursor = tweets.bottom
rss.feed = renderSearchRss(tweets.content, query.text, genQueryUrl(query), cfg)
@@ -87,8 +84,9 @@ proc createRssRouter*(cfg: Config) =
cond cfg.enableRss
cond '.' notin @"name"
let
cursor = getCursor()
name = @"name"
key = redisKey("twitter", name, getCursor())
key = "twitter:" & name & ":" & cursor
var rss = await getCachedRss(key)
if rss.cursor.len > 0:
@@ -103,20 +101,18 @@ proc createRssRouter*(cfg: Config) =
cond cfg.enableRss
cond '.' notin @"name"
cond @"tab" in ["with_replies", "media", "search"]
let
name = @"name"
tab = @"tab"
query =
case tab
of "with_replies": getReplyQuery(name)
of "media": getMediaQuery(name)
of "search": initQuery(params(request), name=name)
else: Query(fromUser: @[name])
let name = @"name"
let query =
case @"tab"
of "with_replies": getReplyQuery(name)
of "media": getMediaQuery(name)
of "search": initQuery(params(request), name=name)
else: Query(fromUser: @[name])
let searchKey = if tab != "search": ""
else: ":" & $hash(genQueryUrl(query))
let key = redisKey(tab, name & searchKey, getCursor())
var key = @"tab" & ":" & @"name" & ":"
if @"tab" == "search":
key &= $hash(genQueryUrl(query)) & ":"
key &= getCursor()
var rss = await getCachedRss(key)
if rss.cursor.len > 0:
@@ -147,17 +143,18 @@ proc createRssRouter*(cfg: Config) =
get "/i/lists/@id/rss":
cond cfg.enableRss
let
id = @"id"
cursor = getCursor()
key = redisKey("lists", id, cursor)
key =
if cursor.len == 0: "lists:" & @"id"
else: "lists:" & @"id" & ":" & cursor
var rss = await getCachedRss(key)
if rss.cursor.len > 0:
respRss(rss, "List")
let
list = await getCachedList(id=id)
timeline = await getGraphListTweets(list.id, cursor)
list = await getCachedList(id=(@"id"))
timeline = await getListTimeline(list.id, cursor)
rss.cursor = timeline.bottom
rss.feed = renderListRss(timeline.content, list, cfg)

@@ -14,31 +14,25 @@ export search
proc createSearchRouter*(cfg: Config) =
router search:
get "/search/?":
let q = @"q"
if q.len > 500:
if @"q".len > 500:
resp Http400, showError("Search input too long.", cfg)
let
prefs = cookiePrefs()
query = initQuery(params(request))
title = "Search" & (if q.len > 0: " (" & q & ")" else: "")
case query.kind
of users:
if "," in q:
redirect("/" & q)
var users: Result[User]
try:
users = await getGraphUserSearch(query, getCursor())
except InternalError:
users = Result[User](beginning: true, query: query)
resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs, title)
if "," in @"q":
redirect("/" & @"q")
let users = await getSearch[User](query, getCursor())
resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs)
of tweets:
let
tweets = await getGraphTweetSearch(query, getCursor())
tweets = await getSearch[Tweet](query, getCursor())
rss = "/search/rss?" & genQueryUrl(query)
resp renderMain(renderTweetSearch(tweets, prefs, getPath()),
request, cfg, prefs, title, rss=rss)
request, cfg, prefs, rss=rss)
else:
resp Http404, showError("Invalid search", cfg)
@@ -48,4 +42,4 @@ proc createSearchRouter*(cfg: Config) =
get "/opensearch":
let url = getUrlPrefix(cfg) & "/search?q="
resp Http200, {"Content-Type": "application/opensearchdescription+xml"},
generateOpenSearchXML(cfg.title, cfg.hostname, url)
generateOpenSearchXML(cfg.title, cfg.hostname, url)

@@ -16,21 +16,19 @@ proc createStatusRouter*(cfg: Config) =
router status:
get "/@name/status/@id/?":
cond '.' notin @"name"
let id = @"id"
if id.len > 19 or id.any(c => not c.isDigit):
resp Http404, showError("Invalid tweet ID", cfg)
cond not @"id".any(c => not c.isDigit)
let prefs = cookiePrefs()
# used for the infinite scroll feature
if @"scroll".len > 0:
let replies = await getReplies(id, getCursor())
let replies = await getReplies(@"id", getCursor())
if replies.content.len == 0:
resp Http404, ""
resp $renderReplies(replies, prefs, getPath())
let conv = await getTweet(id, getCursor())
let conv = await getTweet(@"id", getCursor())
if conv == nil:
echo "nil conv"
if conv == nil or conv.tweet == nil or conv.tweet.id == 0:
var error = "Tweet not found"
@@ -66,7 +64,7 @@ proc createStatusRouter*(cfg: Config) =
get "/@name/@s/@id/@m/?@i?":
cond @"s" in ["status", "statuses"]
cond @"m" in ["video", "photo", "history"]
cond @"m" in ["video", "photo"]
redirect("/$1/status/$2" % [@"name", @"id"])
get "/@name/statuses/@id/?":
@@ -74,6 +72,3 @@ proc createStatusRouter*(cfg: Config) =
get "/i/web/status/@id":
redirect("/i/status/" & @"id")
get "/@name/thread/@id/?":
redirect("/$1/status/$2" % [@"name", @"id"])

@@ -27,7 +27,8 @@ template skipIf[T](cond: bool; default; body: Future[T]): Future[T] =
else:
body
proc fetchProfile*(after: string; query: Query; skipRail=false): Future[Profile] {.async.} =
proc fetchProfile*(after: string; query: Query; skipRail=false;
skipPinned=false): Future[Profile] {.async.} =
let
name = query.fromUser[0]
userId = await getUserId(name)
@@ -44,21 +45,36 @@ proc fetchProfile*(after: string; query: Query; skipRail=false): Future[Profile]
after.setLen 0
let
timeline =
case query.kind
of posts: getTimeline(userId, after)
of replies: getTimeline(userId, after, replies=true)
of media: getMediaTimeline(userId, after)
else: getSearch[Tweet](query, after)
rail =
skipIf(skipRail or query.kind == media, @[]):
getCachedPhotoRail(userId)
getCachedPhotoRail(name)
user = getCachedUser(name)
user = await getCachedUser(name)
result =
case query.kind
of posts: await getGraphUserTweets(userId, TimelineKind.tweets, after)
of replies: await getGraphUserTweets(userId, TimelineKind.replies, after)
of media: await getGraphUserTweets(userId, TimelineKind.media, after)
else: Profile(tweets: await getGraphTweetSearch(query, after))
var pinned: Option[Tweet]
if not skipPinned and user.pinnedTweet > 0 and
after.len == 0 and query.kind in {posts, replies}:
let tweet = await getCachedTweet(user.pinnedTweet)
if not tweet.isNil:
tweet.pinned = true
pinned = some tweet
result.user = await user
result.photoRail = await rail
result = Profile(
user: user,
pinned: pinned,
tweets: await timeline,
photoRail: await rail
)
if result.user.protected or result.user.suspended:
return
result.tweets.query = query
@@ -66,11 +82,11 @@ proc showTimeline*(request: Request; query: Query; cfg: Config; prefs: Prefs;
rss, after: string): Future[string] {.async.} =
if query.fromUser.len != 1:
let
timeline = await getGraphTweetSearch(query, after)
timeline = await getSearch[Tweet](query, after)
html = renderTweetSearch(timeline, prefs, getPath())
return renderMain(html, request, cfg, prefs, "Multi", rss=rss)
var profile = await fetchProfile(after, query)
var profile = await fetchProfile(after, query, skipPinned=prefs.hidePins)
template u: untyped = profile.user
if u.suspended:
@@ -105,16 +121,9 @@ proc createTimelineRouter*(cfg: Config) =
get "/intent/user":
respUserId()
get "/intent/follow/?":
let username = request.params.getOrDefault("screen_name")
if username.len == 0:
resp Http400, showError("Missing screen_name parameter", cfg)
redirect("/" & username)
get "/@name/?@tab?/?":
cond '.' notin @"name"
cond @"name" notin ["pic", "gif", "video", "search", "settings", "login", "intent", "i"]
cond @"name".allCharsInSet({'a'..'z', 'A'..'Z', '0'..'9', '_', ','})
cond @"name" notin ["pic", "gif", "video"]
cond @"tab" in ["with_replies", "media", "search", ""]
let
prefs = cookiePrefs()
@@ -128,7 +137,7 @@ proc createTimelineRouter*(cfg: Config) =
# used for the infinite scroll feature
if @"scroll".len > 0:
if query.fromUser.len != 1:
var timeline = await getGraphTweetSearch(query, after)
var timeline = await getSearch[Tweet](query, after)
if timeline.content.len == 0: resp Http404
timeline.beginning = true
resp $renderTweetSearch(timeline, prefs, getPath())

@@ -17,7 +17,7 @@ proc createUnsupportedRouter*(cfg: Config) =
get "/@name/lists/?": feature()
get "/intent/?@i?":
cond @"i" notin ["user", "follow"]
cond @"i" notin ["user"]
feature()
get "/i/@i?/?@j?":

@@ -1,40 +1,39 @@
@import "_variables";
@import "_mixins";
@import '_variables';
@import '_mixins';
.panel-container {
margin: auto;
font-size: 130%;
margin: auto;
font-size: 130%;
}
.error-panel {
@include center-panel(var(--error_red));
text-align: center;
@include center-panel(var(--error_red));
text-align: center;
}
.search-bar > form {
@include center-panel(var(--darkest_grey));
@include center-panel(var(--darkest_grey));
button {
background: var(--bg_elements);
color: var(--fg_color);
border: 0;
border-radius: 3px;
cursor: pointer;
font-weight: bold;
width: 30px;
height: 30px;
padding: 0px 5px 1px 8px;
}
button {
background: var(--bg_elements);
color: var(--fg_color);
border: 0;
border-radius: 3px;
cursor: pointer;
font-weight: bold;
width: 30px;
height: 30px;
}
input {
font-size: 16px;
width: 100%;
background: var(--bg_elements);
color: var(--fg_color);
border: 0;
border-radius: 4px;
padding: 4px;
margin-right: 8px;
height: unset;
}
input {
font-size: 16px;
width: 100%;
background: var(--bg_elements);
color: var(--fg_color);
border: 0;
border-radius: 4px;
padding: 4px;
margin-right: 8px;
height: unset;
}
}

@@ -66,7 +66,18 @@
}
#search-panel-toggle:checked ~ .search-panel {
max-height: 380px !important;
@if $rows == 6 {
max-height: 200px !important;
}
@if $rows == 5 {
max-height: 300px !important;
}
@if $rows == 4 {
max-height: 300px !important;
}
@if $rows == 3 {
max-height: 365px !important;
}
}
}
}

@@ -1,43 +1,44 @@
// colors
$bg_color: #0f0f0f;
$fg_color: #f8f8f2;
$fg_faded: #f8f8f2cf;
$fg_dark: #ff6c60;
$fg_nav: #ff6c60;
$bg_color: #0F0F0F;
$fg_color: #F8F8F2;
$fg_faded: #F8F8F2CF;
$fg_dark: #FF6C60;
$fg_nav: #FF6C60;
$bg_panel: #161616;
$bg_elements: #121212;
$bg_overlays: #1f1f1f;
$bg_hover: #1a1a1a;
$bg_overlays: #1F1F1F;
$bg_hover: #1A1A1A;
$grey: #888889;
$dark_grey: #404040;
$darker_grey: #282828;
$darkest_grey: #222222;
$border_grey: #3e3e35;
$border_grey: #3E3E35;
$accent: #ff6c60;
$accent_light: #ffaca0;
$accent_dark: #8a3731;
$accent_border: #ff6c6091;
$accent: #FF6C60;
$accent_light: #FFACA0;
$accent_dark: #8A3731;
$accent_border: #FF6C6091;
$play_button: #d8574d;
$play_button_hover: #ff6c60;
$play_button: #D8574D;
$play_button_hover: #FF6C60;
$more_replies_dots: #ad433b;
$error_red: #420a05;
$more_replies_dots: #AD433B;
$error_red: #420A05;
$verified_blue: #1da1f2;
$verified_business: #fac82b;
$verified_government: #c1b6a4;
$verified_blue: #1DA1F2;
$icon_text: $fg_color;
$tab: $fg_color;
$tab_selected: $accent;
$shadow: rgba(0, 0, 0, 0.6);
$shadow_dark: rgba(0, 0, 0, 0.2);
$shadow: rgba(0,0,0,.6);
$shadow_dark: rgba(0,0,0,.2);
//fonts
$font_0: sans-serif;
$font_1: fontello;
$font_0: Helvetica Neue;
$font_1: Helvetica;
$font_2: Arial;
$font_3: sans-serif;
$font_4: fontello;

@@ -1,202 +1,165 @@
@import "_variables";
@import '_variables';
@import "tweet/_base";
@import "profile/_base";
@import "general";
@import "navbar";
@import "inputs";
@import "timeline";
@import "search";
@import 'tweet/_base';
@import 'profile/_base';
@import 'general';
@import 'navbar';
@import 'inputs';
@import 'timeline';
@import 'search';
body {
// colors
--bg_color: #{$bg_color};
--fg_color: #{$fg_color};
--fg_faded: #{$fg_faded};
--fg_dark: #{$fg_dark};
--fg_nav: #{$fg_nav};
// colors
--bg_color: #{$bg_color};
--fg_color: #{$fg_color};
--fg_faded: #{$fg_faded};
--fg_dark: #{$fg_dark};
--fg_nav: #{$fg_nav};
--bg_panel: #{$bg_panel};
--bg_elements: #{$bg_elements};
--bg_overlays: #{$bg_overlays};
--bg_hover: #{$bg_hover};
--bg_panel: #{$bg_panel};
--bg_elements: #{$bg_elements};
--bg_overlays: #{$bg_overlays};
--bg_hover: #{$bg_hover};
--grey: #{$grey};
--dark_grey: #{$dark_grey};
--darker_grey: #{$darker_grey};
--darkest_grey: #{$darkest_grey};
--border_grey: #{$border_grey};
--grey: #{$grey};
--dark_grey: #{$dark_grey};
--darker_grey: #{$darker_grey};
--darkest_grey: #{$darkest_grey};
--border_grey: #{$border_grey};
--accent: #{$accent};
--accent_light: #{$accent_light};
--accent_dark: #{$accent_dark};
--accent_border: #{$accent_border};
--accent: #{$accent};
--accent_light: #{$accent_light};
--accent_dark: #{$accent_dark};
--accent_border: #{$accent_border};
--play_button: #{$play_button};
--play_button_hover: #{$play_button_hover};
--play_button: #{$play_button};
--play_button_hover: #{$play_button_hover};
--more_replies_dots: #{$more_replies_dots};
--error_red: #{$error_red};
--more_replies_dots: #{$more_replies_dots};
--error_red: #{$error_red};
--verified_blue: #{$verified_blue};
--verified_business: #{$verified_business};
--verified_government: #{$verified_government};
--icon_text: #{$icon_text};
--verified_blue: #{$verified_blue};
--icon_text: #{$icon_text};
--tab: #{$fg_color};
--tab_selected: #{$accent};
--tab: #{$fg_color};
--tab_selected: #{$accent};
--profile_stat: #{$fg_color};
--profile_stat: #{$fg_color};
background-color: var(--bg_color);
color: var(--fg_color);
font-family: $font_0, $font_1;
font-size: 15px;
line-height: 1.3;
margin: 0;
background-color: var(--bg_color);
color: var(--fg_color);
font-family: $font_0, $font_1, $font_2, $font_3;
font-size: 14px;
line-height: 1.3;
margin: 0;
}
* {
outline: unset;
margin: 0;
text-decoration: none;
outline: unset;
margin: 0;
text-decoration: none;
}
h1 {
display: inline;
display: inline;
}
h2,
h3 {
font-weight: normal;
h2, h3 {
font-weight: normal;
}
p {
margin: 14px 0;
margin: 14px 0;
}
a {
color: var(--accent);
color: var(--accent);
&:hover {
text-decoration: underline;
}
&:hover {
text-decoration: underline;
}
}
fieldset {
border: 0;
padding: 0;
margin-top: -0.6em;
border: 0;
padding: 0;
margin-top: -0.6em;
}
legend {
width: 100%;
padding: 0.6em 0 0.3em 0;
border: 0;
font-size: 16px;
font-weight: 600;
border-bottom: 1px solid var(--border_grey);
margin-bottom: 8px;
width: 100%;
padding: .6em 0 .3em 0;
border: 0;
font-size: 16px;
font-weight: 600;
border-bottom: 1px solid var(--border_grey);
margin-bottom: 8px;
}
.preferences .note {
border-top: 1px solid var(--border_grey);
border-bottom: 1px solid var(--border_grey);
padding: 6px 0 8px 0;
margin-bottom: 8px;
margin-top: 16px;
border-top: 1px solid var(--border_grey);
border-bottom: 1px solid var(--border_grey);
padding: 6px 0 8px 0;
margin-bottom: 8px;
margin-top: 16px;
}
ul {
padding-left: 1.3em;
padding-left: 1.3em;
}
.container {
display: flex;
flex-wrap: wrap;
box-sizing: border-box;
padding-top: 50px;
margin: auto;
min-height: 100vh;
display: flex;
flex-wrap: wrap;
box-sizing: border-box;
padding-top: 50px;
margin: auto;
min-height: 100vh;
}
.icon-container {
display: inline;
display: inline;
}
.overlay-panel {
max-width: 600px;
width: 100%;
margin: 0 auto;
margin-top: 10px;
background-color: var(--bg_overlays);
padding: 10px 15px;
align-self: start;
max-width: 600px;
width: 100%;
margin: 0 auto;
margin-top: 10px;
background-color: var(--bg_overlays);
padding: 10px 15px;
align-self: start;
ul {
margin-bottom: 14px;
}
ul {
margin-bottom: 14px;
}
p {
word-break: break-word;
}
p {
word-break: break-word;
}
}
.verified-icon {
display: inline-block;
width: 14px;
height: 14px;
margin-left: 2px;
.verified-icon-circle {
position: absolute;
font-size: 15px;
}
.verified-icon-check {
position: absolute;
font-size: 9px;
margin: 5px 3px;
}
&.blue {
.verified-icon-circle {
color: var(--verified_blue);
}
.verified-icon-check {
color: var(--icon_text);
}
}
&.business {
.verified-icon-circle {
color: var(--verified_business);
}
.verified-icon-check {
color: var(--bg_panel);
}
}
&.government {
.verified-icon-circle {
color: var(--verified_government);
}
.verified-icon-check {
color: var(--bg_panel);
}
}
color: var(--icon_text);
background-color: var(--verified_blue);
border-radius: 50%;
flex-shrink: 0;
margin: 2px 0 3px 3px;
padding-top: 2px;
height: 12px;
width: 14px;
font-size: 8px;
display: inline-block;
text-align: center;
vertical-align: middle;
}
@media (max-width: 600px) {
.preferences-container {
max-width: 95vw;
}
@media(max-width: 600px) {
.preferences-container {
max-width: 95vw;
}
.nav-item,
.nav-item .icon-container {
font-size: 16px;
}
.nav-item, .nav-item .icon-container {
font-size: 16px;
}
}

@@ -1,203 +1,185 @@
@import "_variables";
@import "_mixins";
@import '_variables';
@import '_mixins';
button {
@include input-colors;
background-color: var(--bg_elements);
color: var(--fg_color);
border: 1px solid var(--accent_border);
padding: 3px 6px;
font-size: 14px;
cursor: pointer;
float: right;
@include input-colors;
background-color: var(--bg_elements);
color: var(--fg_color);
border: 1px solid var(--accent_border);
padding: 3px 6px;
font-size: 14px;
cursor: pointer;
float: right;
}
input[type="text"],
input[type="date"],
input[type="number"],
select {
@include input-colors;
background-color: var(--bg_elements);
padding: 1px 4px;
color: var(--fg_color);
border: 1px solid var(--accent_border);
border-radius: 0;
font-size: 14px;
@include input-colors;
background-color: var(--bg_elements);
padding: 1px 4px;
color: var(--fg_color);
border: 1px solid var(--accent_border);
border-radius: 0;
font-size: 14px;
}
input[type="number"] {
-moz-appearance: textfield;
}
input[type="text"],
input[type="number"] {
height: 16px;
input[type="text"] {
height: 16px;
}
select {
height: 20px;
padding: 0 2px;
line-height: 1;
height: 20px;
padding: 0 2px;
line-height: 1;
}
input[type="date"]::-webkit-inner-spin-button {
display: none;
}
input[type="number"] {
-moz-appearance: textfield;
}
input[type="number"]::-webkit-inner-spin-button,
input[type="number"]::-webkit-outer-spin-button {
display: none;
-webkit-appearance: none;
margin: 0;
display: none;
}
input[type="date"]::-webkit-clear-button {
margin-left: 17px;
filter: grayscale(100%);
filter: hue-rotate(120deg);
margin-left: 17px;
filter: grayscale(100%);
filter: hue-rotate(120deg);
}
input::-webkit-calendar-picker-indicator {
opacity: 0;
opacity: 0;
}
input::-webkit-datetime-edit-day-field:focus,
input::-webkit-datetime-edit-month-field:focus,
input::-webkit-datetime-edit-year-field:focus {
background-color: var(--accent);
color: var(--fg_color);
outline: none;
background-color: var(--accent);
color: var(--fg_color);
outline: none;
}
.date-range {
.date-input {
display: inline-block;
position: relative;
}
.date-input {
display: inline-block;
position: relative;
}
.icon-container {
pointer-events: none;
position: absolute;
top: 2px;
right: 5px;
}
.icon-container {
pointer-events: none;
position: absolute;
top: 2px;
right: 5px;
}
.search-title {
margin: 0 2px;
}
.search-title {
margin: 0 2px;
}
}
.icon-button button {
color: var(--accent);
text-decoration: none;
background: none;
border: none;
float: none;
padding: unset;
padding-left: 4px;
color: var(--accent);
text-decoration: none;
background: none;
border: none;
float: none;
padding: unset;
padding-left: 4px;
&:hover {
color: var(--accent_light);
}
&:hover {
color: var(--accent_light);
}
}
.checkbox {
position: absolute;
top: 1px;
right: 0;
height: 17px;
width: 17px;
background-color: var(--bg_elements);
border: 1px solid var(--accent_border);
&:after {
content: "";
position: absolute;
display: none;
}
top: 1px;
right: 0;
height: 17px;
width: 17px;
background-color: var(--bg_elements);
border: 1px solid var(--accent_border);
&:after {
content: "";
position: absolute;
display: none;
}
}
.checkbox-container {
display: block;
position: relative;
margin-bottom: 5px;
cursor: pointer;
user-select: none;
padding-right: 22px;
input {
position: absolute;
opacity: 0;
display: block;
position: relative;
margin-bottom: 5px;
cursor: pointer;
height: 0;
width: 0;
user-select: none;
padding-right: 22px;
&:checked ~ .checkbox:after {
display: block;
input {
position: absolute;
opacity: 0;
cursor: pointer;
height: 0;
width: 0;
&:checked ~ .checkbox:after {
display: block;
}
}
}
&:hover input ~ .checkbox {
border-color: var(--accent);
}
&:hover input ~ .checkbox {
border-color: var(--accent);
}
&:active input ~ .checkbox {
border-color: var(--accent_light);
}
&:active input ~ .checkbox {
border-color: var(--accent_light);
}
.checkbox:after {
left: 2px;
bottom: 0;
font-size: 13px;
font-family: $font_1;
content: "\e811";
}
.checkbox:after {
left: 2px;
bottom: 0;
font-size: 13px;
font-family: $font_4;
content: '\e803';
}
}
.pref-group {
display: inline;
display: inline;
}
.preferences {
button {
margin: 6px 0 3px 0;
}
button {
margin: 6px 0 3px 0;
}
label {
padding-right: 150px;
}
label {
padding-right: 150px;
}
select {
position: absolute;
top: 0;
right: 0;
display: block;
-moz-appearance: none;
-webkit-appearance: none;
appearance: none;
}
select {
position: absolute;
top: 0;
right: 0;
display: block;
-moz-appearance: none;
-webkit-appearance: none;
appearance: none;
}
input[type="text"],
input[type="number"] {
position: absolute;
right: 0;
max-width: 140px;
}
input[type="text"] {
position: absolute;
right: 0;
max-width: 140px;
}
.pref-group {
display: block;
}
.pref-group {
display: block;
}
.pref-input {
position: relative;
margin-bottom: 6px;
}
.pref-input {
position: relative;
margin-bottom: 6px;
}
.pref-reset {
float: left;
}
.pref-reset {
float: left;
}
}

@@ -1,87 +1,88 @@
@import "_variables";
@import '_variables';
nav {
display: flex;
align-items: center;
position: fixed;
background-color: var(--bg_overlays);
box-shadow: 0 0 4px $shadow;
padding: 0;
width: 100%;
height: 50px;
z-index: 1000;
font-size: 16px;
display: flex;
align-items: center;
position: fixed;
background-color: var(--bg_overlays);
box-shadow: 0 0 4px $shadow;
padding: 0;
width: 100%;
height: 50px;
z-index: 1000;
font-size: 16px;
a,
.icon-button button {
color: var(--fg_nav);
}
a, .icon-button button {
color: var(--fg_nav);
}
}
.inner-nav {
margin: auto;
box-sizing: border-box;
padding: 0 10px;
display: flex;
align-items: center;
flex-basis: 920px;
height: 50px;
margin: auto;
box-sizing: border-box;
padding: 0 10px;
display: flex;
align-items: center;
flex-basis: 920px;
height: 50px;
}
.site-name {
font-size: 15px;
font-weight: 600;
line-height: 1;
font-size: 15px;
font-weight: 600;
line-height: 1;
&:hover {
color: var(--accent_light);
text-decoration: unset;
}
&:hover {
color: var(--accent_light);
text-decoration: unset;
}
}
.site-logo {
display: block;
width: 35px;
height: 35px;
display: block;
width: 35px;
height: 35px;
}
.nav-item {
display: flex;
flex: 1;
line-height: 50px;
height: 50px;
overflow: hidden;
flex-wrap: wrap;
align-items: center;
display: flex;
flex: 1;
line-height: 50px;
height: 50px;
overflow: hidden;
flex-wrap: wrap;
align-items: center;
&.right {
text-align: right;
justify-content: flex-end;
}
&.right {
text-align: right;
justify-content: flex-end;
}
&.right a:hover {
color: var(--accent_light);
text-decoration: unset;
}
&.right a {
padding-left: 4px;
&:hover {
color: var(--accent_light);
text-decoration: unset;
}
}
}
.lp {
height: 14px;
display: inline-block;
position: relative;
top: 2px;
fill: var(--fg_nav);
height: 14px;
margin-top: 2px;
display: block;
fill: var(--fg_nav);
&:hover {
fill: var(--accent_light);
}
&:hover {
fill: var(--accent_light);
}
}
.icon-info {
margin: 0 -3px;
.icon-info:before {
margin: 0 -3px;
}
.icon-cog {
font-size: 15px;
padding-left: 0 !important;
font-size: 15px;
}

@@ -73,9 +73,9 @@
}
}
.profile-joindate, .profile-location, .profile-website {
.profile-joindate, .profile-location, profile-website {
color: var(--fg_faded);
margin: 1px 0;
margin: 2px 0;
width: 100%;
}
}
@@ -115,7 +115,7 @@
}
.profile-card-tabs-name {
flex-shrink: 100;
@include breakable;
}
.profile-card-avatar {

@@ -1,120 +1,120 @@
@import "_variables";
@import "_mixins";
@import '_variables';
@import '_mixins';
.search-title {
font-weight: bold;
display: inline-block;
margin-top: 4px;
font-weight: bold;
display: inline-block;
margin-top: 4px;
}
.search-field {
display: flex;
flex-wrap: wrap;
button {
margin: 0 2px 0 0;
padding: 0px 1px 1px 4px;
height: 23px;
display: flex;
align-items: center;
}
flex-wrap: wrap;
.pref-input {
margin: 0 4px 0 0;
flex-grow: 1;
height: 23px;
}
button {
margin: 0 2px 0 0;
height: 23px;
}
input[type="text"],
input[type="number"] {
height: calc(100% - 4px);
width: calc(100% - 8px);
}
.pref-input {
margin: 0 4px 0 0;
flex-grow: 1;
height: 23px;
}
> label {
display: inline;
background-color: var(--bg_elements);
color: var(--fg_color);
border: 1px solid var(--accent_border);
padding: 1px 1px 2px 4px;
font-size: 14px;
cursor: pointer;
margin-bottom: 2px;
input[type="text"] {
height: calc(100% - 4px);
width: calc(100% - 8px);
}
@include input-colors;
}
> label {
display: inline;
background-color: var(--bg_elements);
color: var(--fg_color);
border: 1px solid var(--accent_border);
padding: 1px 6px 2px 6px;
font-size: 14px;
cursor: pointer;
margin-bottom: 2px;
@include create-toggle(search-panel, 380px);
@include input-colors;
}
@include create-toggle(search-panel, 200px);
}
.search-panel {
width: 100%;
max-height: 0;
overflow: hidden;
transition: max-height 0.4s;
width: 100%;
max-height: 0;
overflow: hidden;
transition: max-height 0.4s;
flex-grow: 1;
font-weight: initial;
text-align: left;
flex-grow: 1;
font-weight: initial;
text-align: left;
.checkbox-container {
display: inline;
padding-right: unset;
margin-bottom: 5px;
margin-left: 23px;
}
> div {
line-height: 1.7em;
}
.checkbox {
right: unset;
left: -22px;
line-height: 1.6em;
}
.checkbox-container {
display: inline;
padding-right: unset;
margin-bottom: unset;
margin-left: 23px;
}
.checkbox-container .checkbox:after {
top: -4px;
}
.checkbox {
right: unset;
left: -22px;
}
.checkbox-container .checkbox:after {
top: -4px;
}
}
.search-row {
display: flex;
flex-wrap: wrap;
line-height: unset;
display: flex;
flex-wrap: wrap;
line-height: unset;
> div {
flex-grow: 1;
flex-shrink: 1;
}
input {
height: 21px;
}
.pref-input {
display: block;
padding-bottom: 5px;
> div {
flex-grow: 1;
flex-shrink: 1;
}
input {
height: 21px;
margin-top: 1px;
height: 21px;
}
.pref-input {
display: block;
padding-bottom: 5px;
input {
height: 21px;
margin-top: 1px;
}
}
}
}
.search-toggles {
flex-grow: 1;
display: grid;
grid-template-columns: repeat(5, auto);
grid-column-gap: 10px;
flex-grow: 1;
display: grid;
grid-template-columns: repeat(6, auto);
grid-column-gap: 10px;
}
.profile-tabs {
@include search-resize(820px, 5);
@include search-resize(715px, 4);
@include search-resize(700px, 5);
@include search-resize(485px, 4);
@include search-resize(410px, 3);
@include search-resize(820px, 5);
@include search-resize(725px, 4);
@include search-resize(600px, 6);
@include search-resize(560px, 5);
@include search-resize(480px, 4);
@include search-resize(410px, 3);
}
@include search-resize(700px, 5);
@include search-resize(485px, 4);
@include search-resize(560px, 5);
@include search-resize(480px, 4);
@include search-resize(410px, 3);

@@ -1,162 +1,162 @@
@import "_variables";
@import '_variables';
.timeline-container {
@include panel(100%, 600px);
@include panel(100%, 600px);
}
.timeline {
background-color: var(--bg_panel);
background-color: var(--bg_panel);
> div:not(:first-child) {
border-top: 1px solid var(--border_grey);
}
> div:not(:first-child) {
border-top: 1px solid var(--border_grey);
}
}
.timeline-header {
width: 100%;
background-color: var(--bg_panel);
text-align: center;
padding: 8px;
display: block;
font-weight: bold;
margin-bottom: 5px;
box-sizing: border-box;
width: 100%;
background-color: var(--bg_panel);
text-align: center;
padding: 8px;
display: block;
font-weight: bold;
margin-bottom: 5px;
box-sizing: border-box;
button {
float: unset;
}
button {
float: unset;
}
}
.timeline-banner img {
width: 100%;
width: 100%;
}
.timeline-description {
font-weight: normal;
font-weight: normal;
}
.tab {
align-items: center;
display: flex;
flex-wrap: wrap;
list-style: none;
margin: 0 0 5px 0;
background-color: var(--bg_panel);
padding: 0;
align-items: center;
display: flex;
flex-wrap: wrap;
list-style: none;
margin: 0 0 5px 0;
background-color: var(--bg_panel);
padding: 0;
}
.tab-item {
flex: 1 1 0;
text-align: center;
margin-top: 0;
flex: 1 1 0;
text-align: center;
margin-top: 0;
a {
border-bottom: 0.1rem solid transparent;
color: var(--tab);
display: block;
padding: 8px 0;
text-decoration: none;
font-weight: bold;
a {
border-bottom: .1rem solid transparent;
color: var(--tab);
display: block;
padding: 8px 0;
text-decoration: none;
font-weight: bold;
&:hover {
text-decoration: none;
&:hover {
text-decoration: none;
}
&.active {
border-bottom-color: var(--tab_selected);
color: var(--tab_selected);
}
}
&.active {
border-bottom-color: var(--tab_selected);
color: var(--tab_selected);
&.active a {
border-bottom-color: var(--tab_selected);
color: var(--tab_selected);
}
}
&.active a {
border-bottom-color: var(--tab_selected);
color: var(--tab_selected);
}
&.wide {
flex-grow: 1.2;
flex-basis: 50px;
}
&.wide {
flex-grow: 1.2;
flex-basis: 50px;
}
}
.timeline-footer {
background-color: var(--bg_panel);
padding: 6px 0;
background-color: var(--bg_panel);
padding: 6px 0;
}
.timeline-protected {
text-align: center;
text-align: center;
p {
margin: 8px 0;
}
p {
margin: 8px 0;
}
h2 {
color: var(--accent);
font-size: 20px;
font-weight: 600;
}
h2 {
color: var(--accent);
font-size: 20px;
font-weight: 600;
}
}
.timeline-none {
color: var(--accent);
font-size: 20px;
font-weight: 600;
text-align: center;
color: var(--accent);
font-size: 20px;
font-weight: 600;
text-align: center;
}
.timeline-end {
background-color: var(--bg_panel);
color: var(--accent);
font-size: 16px;
font-weight: 600;
text-align: center;
background-color: var(--bg_panel);
color: var(--accent);
font-size: 16px;
font-weight: 600;
text-align: center;
}
.show-more {
background-color: var(--bg_panel);
text-align: center;
padding: 0.75em 0;
display: block !important;
background-color: var(--bg_panel);
text-align: center;
padding: .75em 0;
display: block !important;
a {
background-color: var(--darkest_grey);
display: inline-block;
height: 2em;
padding: 0 2em;
line-height: 2em;
a {
background-color: var(--darkest_grey);
display: inline-block;
height: 2em;
padding: 0 2em;
line-height: 2em;
&:hover {
background-color: var(--darker_grey);
&:hover {
background-color: var(--darker_grey);
}
}
}
}
.top-ref {
background-color: var(--bg_color);
border-top: none !important;
background-color: var(--bg_color);
border-top: none !important;
.icon-down {
font-size: 20px;
display: flex;
justify-content: center;
text-decoration: none;
.icon-down {
font-size: 20px;
display: flex;
justify-content: center;
text-decoration: none;
&:hover {
color: var(--accent_light);
&:hover {
color: var(--accent_light);
}
&::before {
transform: rotate(180deg) translateY(-1px);
}
}
&::before {
transform: rotate(180deg) translateY(-1px);
}
}
}
.timeline-item {
overflow-wrap: break-word;
border-left-width: 0;
min-width: 0;
padding: 0.75em;
display: flex;
position: relative;
overflow-wrap: break-word;
border-left-width: 0;
min-width: 0;
padding: .75em;
display: flex;
position: relative;
}

View File

@@ -1,244 +1,231 @@
@import "_variables";
@import "_mixins";
@import "thread";
@import "media";
@import "video";
@import "embed";
@import "card";
@import "poll";
@import "quote";
@import '_variables';
@import '_mixins';
@import 'thread';
@import 'media';
@import 'video';
@import 'embed';
@import 'card';
@import 'poll';
@import 'quote';
.tweet-body {
flex: 1;
min-width: 0;
margin-left: 58px;
pointer-events: none;
z-index: 1;
flex: 1;
min-width: 0;
margin-left: 58px;
pointer-events: none;
z-index: 1;
}
.tweet-content {
line-height: 1.3em;
pointer-events: all;
display: inline;
font-family: $font_3;
line-height: 1.3em;
pointer-events: all;
display: inline;
}
.tweet-bidi {
display: block !important;
display: block !important;
}
.tweet-header {
padding: 0;
vertical-align: bottom;
flex-basis: 100%;
margin-bottom: 0.2em;
padding: 0;
vertical-align: bottom;
flex-basis: 100%;
margin-bottom: .2em;
a {
display: inline-block;
word-break: break-all;
max-width: 100%;
pointer-events: all;
}
a {
display: inline-block;
word-break: break-all;
max-width: 100%;
pointer-events: all;
}
}
.tweet-name-row {
padding: 0;
display: flex;
justify-content: space-between;
padding: 0;
display: flex;
justify-content: space-between;
}
.fullname-and-username {
display: flex;
min-width: 0;
display: flex;
min-width: 0;
}
.fullname {
@include ellipsis;
flex-shrink: 2;
max-width: 80%;
font-size: 14px;
font-weight: 700;
color: var(--fg_color);
@include ellipsis;
flex-shrink: 2;
max-width: 80%;
font-size: 14px;
font-weight: 700;
color: var(--fg_color);
}
.username {
@include ellipsis;
min-width: 1.6em;
margin-left: 0.4em;
word-wrap: normal;
@include ellipsis;
min-width: 1.6em;
margin-left: .4em;
word-wrap: normal;
}
.tweet-date {
display: flex;
flex-shrink: 0;
margin-left: 4px;
display: flex;
flex-shrink: 0;
margin-left: 4px;
}
.tweet-date a,
.username,
.show-more a {
color: var(--fg_dark);
.tweet-date a, .username, .show-more a {
color: var(--fg_dark);
}
.tweet-published {
margin: 0;
margin-top: 5px;
color: var(--grey);
pointer-events: all;
margin: 0;
margin-top: 5px;
color: var(--grey);
pointer-events: all;
}
.tweet-avatar {
display: contents !important;
display: contents !important;
img {
float: left;
margin-top: 3px;
margin-left: -58px;
width: 48px;
height: 48px;
}
img {
float: left;
margin-top: 3px;
margin-left: -58px;
width: 48px;
height: 48px;
}
}
.avatar {
&.round {
border-radius: 50%;
-webkit-user-select: none;
}
position: absolute;
&.mini {
position: unset;
margin-right: 5px;
margin-top: -1px;
width: 20px;
height: 20px;
}
&.round {
border-radius: 50%;
}
&.mini {
position: unset;
margin-right: 5px;
margin-top: -1px;
width: 20px;
height: 20px;
}
}
.tweet-embed {
display: flex;
flex-direction: column;
justify-content: center;
height: 100%;
background-color: var(--bg_panel);
.tweet-content {
font-size: 18px;
}
.tweet-body {
display: flex;
flex-direction: column;
max-height: calc(100vh - 0.75em * 2);
}
justify-content: center;
height: 100%;
background-color: var(--bg_panel);
.card-image img {
height: auto;
}
.avatar {
position: absolute;
}
.tweet-content {
font-size: 18px
}
.tweet-body {
display: flex;
flex-direction: column;
max-height: calc(100vh - 0.75em * 2);
}
}
.attribution {
display: flex;
pointer-events: all;
margin: 5px 0;
display: flex;
pointer-events: all;
margin: 5px 0;
strong {
color: var(--fg_color);
}
strong {
color: var(--fg_color);
}
}
.media-tag-block {
padding-top: 5px;
pointer-events: all;
color: var(--fg_faded);
.icon-container {
padding-right: 2px;
}
.media-tag,
.icon-container {
padding-top: 5px;
pointer-events: all;
color: var(--fg_faded);
}
.icon-container {
padding-right: 2px;
}
.media-tag, .icon-container {
color: var(--fg_faded);
}
}
.timeline-container .media-tag-block {
font-size: 13px;
font-size: 13px;
}
.tweet-geo {
color: var(--fg_faded);
color: var(--fg_faded);
}
.replying-to {
color: var(--fg_faded);
margin: -2px 0 4px;
color: var(--fg_faded);
margin: -2px 0 4px;
a {
pointer-events: all;
}
a {
pointer-events: all;
}
}
.retweet-header,
.pinned,
.tweet-stats {
align-content: center;
color: var(--grey);
display: flex;
flex-shrink: 0;
flex-wrap: wrap;
font-size: 14px;
font-weight: 600;
line-height: 22px;
.retweet-header, .pinned, .tweet-stats {
align-content: center;
color: var(--grey);
display: flex;
flex-shrink: 0;
flex-wrap: wrap;
font-size: 14px;
font-weight: 600;
line-height: 22px;
span {
@include ellipsis;
}
span {
@include ellipsis;
}
}
.retweet-header {
margin-top: -5px !important;
margin-top: -5px !important;
}
.tweet-stats {
margin-bottom: -3px;
-webkit-user-select: none;
margin-bottom: -3px;
}
.tweet-stat {
padding-top: 5px;
min-width: 1em;
margin-right: 0.8em;
padding-top: 5px;
min-width: 1em;
margin-right: 0.8em;
}
.show-thread {
display: block;
pointer-events: all;
padding-top: 2px;
display: block;
pointer-events: all;
padding-top: 2px;
}
.unavailable-box {
width: 100%;
height: 100%;
padding: 12px;
border: solid 1px var(--dark_grey);
box-sizing: border-box;
border-radius: 10px;
background-color: var(--bg_color);
z-index: 2;
width: 100%;
height: 100%;
padding: 12px;
border: solid 1px var(--dark_grey);
box-sizing: border-box;
border-radius: 10px;
background-color: var(--bg_color);
z-index: 2;
}
.tweet-link {
height: 100%;
width: 100%;
left: 0;
top: 0;
position: absolute;
-webkit-user-select: none;
height: 100%;
width: 100%;
left: 0;
top: 0;
position: absolute;
&:hover {
background-color: var(--bg_hover);
}
&:hover {
background-color: var(--bg_hover);
}
}

View File

@@ -42,7 +42,6 @@
.card-description {
margin: 0.3em 0;
white-space: pre-wrap;
}
.card-destination {

View File

@@ -1,17 +1,17 @@
@import "_variables";
@import "_mixins";
@import '_variables';
@import '_mixins';
.embed-video {
.gallery-video {
width: 100%;
height: 100%;
position: absolute;
background-color: black;
top: 0%;
left: 0%;
}
.gallery-video {
width: 100%;
height: 100%;
position: absolute;
background-color: black;
top: 0%;
left: 0%;
}
.video-container {
max-height: unset;
}
.video-container {
max-height: unset;
}
}

View File

@@ -1,76 +1,76 @@
@import "_variables";
@import '_variables';
.gallery-row {
display: flex;
flex-direction: row;
flex-wrap: nowrap;
align-items: center;
overflow: hidden;
flex-grow: 1;
max-height: 379.5px;
max-width: 533px;
pointer-events: all;
.still-image {
width: 100%;
display: flex;
}
flex-direction: row;
flex-wrap: nowrap;
align-items: center;
overflow: hidden;
flex-grow: 1;
max-height: 379.5px;
max-width: 533px;
pointer-events: all;
.still-image {
width: 100%;
display: flex;
}
}
.attachments {
margin-top: 0.35em;
display: flex;
flex-direction: row;
width: 100%;
max-height: 600px;
border-radius: 7px;
overflow: hidden;
flex-flow: column;
background-color: var(--bg_color);
align-items: center;
pointer-events: all;
.image-attachment {
margin-top: .35em;
display: flex;
flex-direction: row;
width: 100%;
}
max-height: 600px;
border-radius: 7px;
overflow: hidden;
flex-flow: column;
background-color: var(--bg_color);
align-items: center;
pointer-events: all;
.image-attachment {
width: 100%;
}
}
.attachment {
position: relative;
line-height: 0;
overflow: hidden;
margin: 0 0.25em 0 0;
flex-grow: 1;
box-sizing: border-box;
min-width: 2em;
position: relative;
line-height: 0;
overflow: hidden;
margin: 0 .25em 0 0;
flex-grow: 1;
box-sizing: border-box;
min-width: 2em;
&:last-child {
margin: 0;
max-height: 530px;
}
&:last-child {
margin: 0;
max-height: 530px;
}
}
.gallery-gif video {
max-height: 530px;
background-color: #101010;
max-height: 530px;
background-color: #101010;
}
.still-image {
max-height: 379.5px;
max-width: 533px;
justify-content: center;
img {
object-fit: cover;
max-width: 100%;
max-height: 379.5px;
flex-basis: 300px;
flex-grow: 1;
}
max-width: 533px;
justify-content: center;
img {
object-fit: cover;
max-width: 100%;
max-height: 379.5px;
flex-basis: 300px;
flex-grow: 1;
}
}
.image {
display: inline-block;
display: inline-block;
}
// .single-image {
@@ -86,34 +86,34 @@
// }
.overlay-circle {
border-radius: 50%;
background-color: var(--dark_grey);
width: 40px;
height: 40px;
align-items: center;
display: flex;
border-width: 5px;
border-color: var(--play_button);
border-style: solid;
border-radius: 50%;
background-color: var(--dark_grey);
width: 40px;
height: 40px;
align-items: center;
display: flex;
border-width: 5px;
border-color: var(--play_button);
border-style: solid;
}
.overlay-triangle {
width: 0;
height: 0;
border-style: solid;
border-width: 12px 0 12px 17px;
border-color: transparent transparent transparent var(--play_button);
margin-left: 14px;
width: 0;
height: 0;
border-style: solid;
border-width: 12px 0 12px 17px;
border-color: transparent transparent transparent var(--play_button);
margin-left: 14px;
}
.media-gif {
display: table;
background-color: unset;
width: unset;
display: table;
background-color: unset;
width: unset;
}
.media-body {
flex: 1;
padding: 0;
white-space: pre-wrap;
flex: 1;
padding: 0;
white-space: pre-wrap;
}

View File

@@ -1,42 +1,42 @@
@import "_variables";
@import '_variables';
.poll-meter {
overflow: hidden;
position: relative;
margin: 6px 0;
height: 26px;
background: var(--bg_color);
border-radius: 5px;
display: flex;
align-items: center;
overflow: hidden;
position: relative;
margin: 6px 0;
height: 26px;
background: var(--bg_color);
border-radius: 5px;
display: flex;
align-items: center;
}
.poll-choice-bar {
height: 100%;
position: absolute;
background: var(--dark_grey);
height: 100%;
position: absolute;
background: var(--dark_grey);
}
.poll-choice-value {
position: relative;
font-weight: bold;
margin-left: 5px;
margin-right: 6px;
min-width: 30px;
text-align: right;
pointer-events: all;
position: relative;
font-weight: bold;
margin-left: 5px;
margin-right: 6px;
min-width: 30px;
text-align: right;
pointer-events: all;
}
.poll-choice-option {
position: relative;
pointer-events: all;
position: relative;
pointer-events: all;
}
.poll-info {
color: var(--grey);
pointer-events: all;
color: var(--grey);
pointer-events: all;
}
.leader .poll-choice-bar {
background: var(--accent_dark);
background: var(--accent_dark);
}

View File

@@ -1,95 +1,94 @@
@import "_variables";
@import '_variables';
.quote {
margin-top: 10px;
border: solid 1px var(--dark_grey);
border-radius: 10px;
background-color: var(--bg_elements);
overflow: hidden;
pointer-events: all;
position: relative;
width: 100%;
&:hover {
border-color: var(--grey);
}
&.unavailable:hover {
border-color: var(--dark_grey);
}
.tweet-name-row {
padding: 6px 8px;
margin-top: 1px;
}
.quote-text {
margin-top: 10px;
border: solid 1px var(--dark_grey);
border-radius: 10px;
background-color: var(--bg_elements);
overflow: hidden;
white-space: pre-wrap;
word-wrap: break-word;
padding: 0px 8px 8px 8px;
}
pointer-events: all;
position: relative;
width: 100%;
.show-thread {
padding: 0px 8px 6px 8px;
margin-top: -6px;
}
&:hover {
border-color: var(--grey);
}
.replying-to {
padding: 0px 8px;
margin: unset;
}
&.unavailable:hover {
border-color: var(--dark_grey);
}
.tweet-name-row {
padding: 6px 8px;
margin-top: 1px;
}
.quote-text {
overflow: hidden;
white-space: pre-wrap;
word-wrap: break-word;
padding: 0px 8px 8px 8px;
}
.show-thread {
padding: 0px 8px 6px 8px;
margin-top: -6px;
}
.replying-to {
padding: 0px 8px;
margin: unset;
}
}
.unavailable-quote {
padding: 12px;
padding: 12px;
}
.quote-link {
width: 100%;
height: 100%;
left: 0;
top: 0;
position: absolute;
width: 100%;
height: 100%;
left: 0;
top: 0;
position: absolute;
}
.quote-media-container {
max-height: 300px;
display: flex;
.card {
margin: unset;
}
.attachments {
border-radius: 0;
}
.media-gif {
width: 100%;
display: flex;
justify-content: center;
}
.gallery-gif .attachment {
display: flex;
justify-content: center;
background-color: var(--bg_color);
video {
height: unset;
width: unset;
max-height: 100%;
max-width: 100%;
}
}
.gallery-video,
.gallery-gif {
max-height: 300px;
}
display: flex;
.still-image img {
max-height: 250px;
}
.card {
margin: unset;
}
.attachments {
border-radius: 0;
}
.media-gif {
width: 100%;
display: flex;
justify-content: center;
}
.gallery-gif .attachment {
display: flex;
justify-content: center;
background-color: var(--bg_color);
video {
height: unset;
width: unset;
max-height: 100%;
max-width: 100%;
}
}
.gallery-video, .gallery-gif {
max-height: 300px;
}
.still-image img {
max-height: 250px
}
}

View File

@@ -1,139 +1,113 @@
@import "_variables";
@import "_mixins";
@import '_variables';
@import '_mixins';
.conversation {
@include panel(100%, 600px);
@include panel(100%, 600px);
.show-more {
margin-bottom: 10px;
}
.show-more {
margin-bottom: 10px;
}
}
.main-thread {
margin-bottom: 20px;
background-color: var(--bg_panel);
margin-bottom: 20px;
background-color: var(--bg_panel);
}
.main-tweet,
.replies {
padding-top: 50px;
margin-top: -50px;
.main-tweet, .replies {
padding-top: 50px;
margin-top: -50px;
}
.main-tweet .tweet-content {
font-size: 18px;
font-size: 18px;
}
@media (max-width: 600px) {
.main-tweet .tweet-content {
font-size: 16px;
}
@media(max-width: 600px) {
.main-tweet .tweet-content {
font-size: 16px;
}
}
.reply {
background-color: var(--bg_panel);
margin-bottom: 10px;
background-color: var(--bg_panel);
margin-bottom: 10px;
}
.thread-line {
.timeline-item::before,
&.timeline-item::before {
background: var(--accent_dark);
content: "";
position: relative;
min-width: 3px;
width: 3px;
left: 26px;
border-radius: 2px;
margin-left: -3px;
margin-bottom: 37px;
top: 56px;
z-index: 1;
pointer-events: none;
}
.timeline-item::before,
&.timeline-item::before {
background: var(--accent_dark);
content: '';
position: relative;
min-width: 3px;
width: 3px;
left: 26px;
border-radius: 2px;
margin-left: -3px;
margin-bottom: 37px;
top: 56px;
z-index: 1;
pointer-events: none;
}
.with-header:not(:first-child)::after {
background: var(--accent_dark);
content: "";
position: relative;
float: left;
min-width: 3px;
width: 3px;
right: calc(100% - 26px);
border-radius: 2px;
margin-left: -3px;
margin-bottom: 37px;
bottom: 10px;
height: 30px;
z-index: 1;
pointer-events: none;
}
.with-header:not(:first-child)::after {
background: var(--accent_dark);
content: '';
position: relative;
float: left;
min-width: 3px;
width: 3px;
right: calc(100% - 26px);
border-radius: 2px;
margin-left: -3px;
margin-bottom: 37px;
bottom: 10px;
height: 30px;
z-index: 1;
pointer-events: none;
}
.unavailable::before {
top: 48px;
margin-bottom: 28px;
}
.unavailable::before {
top: 48px;
margin-bottom: 28px;
}
.more-replies::before {
content: "...";
background: unset;
color: var(--more_replies_dots);
font-weight: bold;
font-size: 20px;
line-height: 0.25em;
left: 1.2em;
width: 5px;
top: 2px;
margin-bottom: 0;
margin-left: -2.5px;
}
.more-replies::before {
content: '...';
background: unset;
color: var(--more_replies_dots);
font-weight: bold;
font-size: 20px;
line-height: 0.25em;
left: 1.2em;
width: 5px;
top: 2px;
margin-bottom: 0;
margin-left: -2.5px;
}
.earlier-replies {
padding-bottom: 0;
margin-bottom: -5px;
}
.earlier-replies {
padding-bottom: 0;
margin-bottom: -5px;
}
}
.timeline-item.thread-last::before {
background: unset;
min-width: unset;
width: 0;
margin: 0;
background: unset;
min-width: unset;
width: 0;
margin: 0;
}
.more-replies {
padding-top: 0.3em !important;
padding-top: 0.3em !important;
}
.more-replies-text {
@include ellipsis;
display: block;
margin-left: 58px;
padding: 7px 0;
}
.timeline-item.thread.more-replies-thread {
padding: 0 0.75em;
&::before {
top: 40px;
margin-bottom: 31px;
}
.more-replies {
display: flex;
padding-top: unset !important;
margin-top: 8px;
&::before {
display: inline-block;
position: relative;
top: -1px;
line-height: 0.4em;
}
.more-replies-text {
display: inline;
}
}
@include ellipsis;
display: block;
margin-left: 58px;
padding: 7px 0;
}

View File

@@ -1,77 +1,66 @@
@import "_variables";
@import "_mixins";
@import '_variables';
@import '_mixins';
video {
height: 100%;
width: 100%;
max-height: 100%;
max-width: 100%;
}
.gallery-video {
display: flex;
overflow: hidden;
display: flex;
overflow: hidden;
}
.gallery-video.card-container {
flex-direction: column;
width: 100%;
flex-direction: column;
}
.video-container {
min-height: 80px;
min-width: 200px;
max-height: 530px;
margin: 0;
max-height: 530px;
margin: 0;
display: flex;
align-items: center;
justify-content: center;
img {
max-height: 100%;
max-width: 100%;
}
img {
max-height: 100%;
max-width: 100%;
}
}
.video-overlay {
@include play-button;
background-color: $shadow;
@include play-button;
background-color: $shadow;
p {
position: relative;
z-index: 0;
text-align: center;
top: calc(50% - 20px);
font-size: 20px;
line-height: 1.3;
margin: 0 20px;
}
p {
position: relative;
z-index: 0;
text-align: center;
top: calc(50% - 20px);
font-size: 20px;
line-height: 1.3;
margin: 0 20px;
}
.overlay-circle {
position: relative;
z-index: 0;
top: calc(50% - 20px);
margin: 0 auto;
width: 40px;
height: 40px;
}
div {
position: relative;
z-index: 0;
top: calc(50% - 20px);
margin: 0 auto;
width: 40px;
height: 40px;
}
.overlay-duration {
position: absolute;
bottom: 8px;
left: 8px;
background-color: #0000007a;
line-height: 1em;
padding: 4px 6px 4px 6px;
border-radius: 5px;
font-weight: bold;
}
form {
width: 100%;
height: 100%;
align-items: center;
justify-content: center;
display: flex;
}
form {
width: 100%;
height: 100%;
align-items: center;
justify-content: center;
display: flex;
}
button {
padding: 5px 8px;
font-size: 16px;
}
button {
padding: 5px 8px;
font-size: 16px;
}
}

View File

@@ -1,62 +0,0 @@
import std/[asyncdispatch, base64, httpclient, random, strutils, sequtils, times]
import nimcrypto
import experimental/parser/tid
randomize()
const defaultKeyword = "obfiowerehiring";
const pairsUrl =
"https://raw.githubusercontent.com/fa0311/x-client-transaction-id-pair-dict/refs/heads/main/pair.json";
var
cachedPairs: seq[TidPair] = @[]
lastCached = 0
# refresh every hour
ttlSec = 60 * 60
proc getPair(): Future[TidPair] {.async.} =
if cachedPairs.len == 0 or int(epochTime()) - lastCached > ttlSec:
lastCached = int(epochTime())
let client = newAsyncHttpClient()
defer: client.close()
let resp = await client.get(pairsUrl)
if resp.status == $Http200:
cachedPairs = parseTidPairs(await resp.body)
return sample(cachedPairs)
proc encodeSha256(text: string): array[32, byte] =
let
data = cast[ptr byte](addr text[0])
dataLen = uint(len(text))
digest = sha256.digest(data, dataLen)
return digest.data
proc encodeBase64[T](data: T): string =
return encode(data).replace("=", "")
proc decodeBase64(data: string): seq[byte] =
return cast[seq[byte]](decode(data))
proc genTid*(path: string): Future[string] {.async.} =
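## Builds the x-client-transaction-id header value for a GET request to `path`.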
let
pair = await getPair()
timeNow = int(epochTime() - 1682924400)
timeNowBytes = @[
byte(timeNow and 0xff),
byte((timeNow shr 8) and 0xff),
byte((timeNow shr 16) and 0xff),
byte((timeNow shr 24) and 0xff)
]
data = "GET!" & path & "!" & $timeNow & defaultKeyword & pair.animationKey
hashBytes = encodeSha256(data)
keyBytes = decodeBase64(pair.verification)
bytesArr = keyBytes & timeNowBytes & hashBytes[0 ..< 16] & @[3'u8]
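# obfuscation: prepend a random byte and XOR every payload byte with it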
randomNum = byte(rand(256))
tid = @[randomNum] & bytesArr.mapIt(it xor randomNum)
return encodeBase64(tid)

154
src/tokens.nim Normal file
View File

@@ -0,0 +1,154 @@
# SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, httpclient, times, sequtils, json, random
import strutils, tables
import zippy
import types, consts, http_pool
const
maxConcurrentReqs = 5 # max requests at a time per token, to avoid race conditions
maxLastUse = 1.hours # if a token is unused for 60 minutes, it expires
maxAge = 2.hours + 55.minutes # tokens expire after 3 hours
failDelay = initDuration(minutes=30)
var
clientPool: HttpPool
tokenPool: seq[Token]
lastFailed: Time
proc getPoolJson*(): JsonNode =
var
list = newJObject()
totalReqs = 0
totalPending = 0
reqsPerApi: Table[string, int]
for token in tokenPool:
totalPending.inc(token.pending)
list[token.tok] = %*{
"apis": newJObject(),
"pending": token.pending,
"init": $token.init,
"lastUse": $token.lastUse
}
for api in token.apis.keys:
list[token.tok]["apis"][$api] = %token.apis[api]
let
maxReqs =
case api
of Api.listMembers, Api.listBySlug, Api.list, Api.userRestId: 500
of Api.timeline: 187
else: 180
reqs = maxReqs - token.apis[api].remaining
reqsPerApi[$api] = reqsPerApi.getOrDefault($api, 0) + reqs
totalReqs.inc(reqs)
return %*{
"amount": tokenPool.len,
"requests": totalReqs,
"pending": totalPending,
"apis": reqsPerApi,
"tokens": list
}
proc rateLimitError*(): ref RateLimitError =
newException(RateLimitError, "rate limited")
proc fetchToken(): Future[Token] {.async.} =
if getTime() - lastFailed < failDelay:
raise rateLimitError()
let headers = newHttpHeaders({
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"accept-encoding": "gzip",
"accept-language": "en-US,en;q=0.5",
"connection": "keep-alive",
"authorization": auth
})
try:
let
resp = clientPool.use(headers): await c.postContent(activate)
tokNode = parseJson(uncompress(resp))["guest_token"]
tok = tokNode.getStr($(tokNode.getInt))
time = getTime()
return Token(tok: tok, init: time, lastUse: time)
except Exception as e:
lastFailed = getTime()
echo "fetching token failed: ", e.msg
proc expired(token: Token): bool =
let time = getTime()
token.init < time - maxAge or token.lastUse < time - maxLastUse
proc isLimited(token: Token; api: Api): bool =
if token.isNil or token.expired:
return true
if api in token.apis:
let limit = token.apis[api]
return (limit.remaining <= 10 and limit.reset > epochTime().int)
else:
return false
proc isReady(token: Token; api: Api): bool =
not (token.isNil or token.pending > maxConcurrentReqs or token.isLimited(api))
proc release*(token: Token; used=false; invalid=false) =
if token.isNil: return
if invalid or token.expired:
let idx = tokenPool.find(token)
if idx > -1: tokenPool.delete(idx)
elif used:
dec token.pending
token.lastUse = getTime()
proc getToken*(api: Api): Future[Token] {.async.} =
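## Returns a pool token that isn't rate limited for `api`, sampling the pool at
## random and fetching a new guest token when none is ready.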
for i in 0 ..< tokenPool.len:
if result.isReady(api): break
release(result)
result = tokenPool.sample()
if not result.isReady(api):
release(result)
result = await fetchToken()
tokenPool.add result
if not result.isNil:
inc result.pending
else:
raise rateLimitError()
proc setRateLimit*(token: Token; api: Api; remaining, reset: int) =
# avoid undefined behavior in race conditions
if api in token.apis:
let limit = token.apis[api]
if limit.reset >= reset and limit.remaining < remaining:
return
token.apis[api] = RateLimit(remaining: remaining, reset: reset)
proc poolTokens*(amount: int) {.async.} =
var futs: seq[Future[Token]]
for i in 0 ..< amount:
futs.add fetchToken()
for token in futs:
var newToken: Token
try: newToken = await token
except: discard
if not newToken.isNil:
tokenPool.add newToken
proc initTokenPool*(cfg: Config) {.async.} =
clientPool = HttpPool()
while true:
if tokenPool.countIt(not it.isLimited(Api.timeline)) < cfg.minTokens:
await poolTokens(min(4, cfg.minTokens - tokenPool.len))
await sleepAsync(2000)

View File

@@ -6,75 +6,45 @@ genPrefsType()
type
RateLimitError* = object of CatchableError
NoSessionsError* = object of CatchableError
InternalError* = object of CatchableError
BadClientError* = object of CatchableError
TimelineKind* {.pure.} = enum
tweets, replies, media
ApiUrl* = object
endpoint*: string
params*: seq[(string, string)]
ApiReq* = object
oauth*: ApiUrl
cookie*: ApiUrl
Api* {.pure.} = enum
userShow
timeline
search
tweet
list
listBySlug
listMembers
userRestId
status
RateLimit* = object
limit*: int
remaining*: int
reset*: int
SessionKind* = enum
oauth
cookie
Session* = ref object
id*: int64
username*: string
Token* = ref object
tok*: string
init*: Time
lastUse*: Time
pending*: int
limited*: bool
limitedAt*: int
apis*: Table[string, RateLimit]
case kind*: SessionKind
of oauth:
oauthToken*: string
oauthSecret*: string
of cookie:
authToken*: string
ct0*: string
apis*: Table[Api, RateLimit]
Error* = enum
null = 0
noUserMatches = 17
protectedUser = 22
missingParams = 25
timeout = 29
couldntAuth = 32
doesntExist = 34
unauthorized = 37
invalidParam = 47
userNotFound = 50
suspended = 63
rateLimited = 88
expiredToken = 89
invalidToken = 89
listIdOrSlug = 112
tweetNotFound = 144
tweetNotAuthorized = 179
forbidden = 200
badRequest = 214
badToken = 239
locked = 326
noCsrf = 353
tweetUnavailable = 421
tweetCensored = 422
VerifiedType* = enum
none = "None"
blue = "Blue"
business = "Business"
government = "Government"
User* = object
id*: string
@@ -91,7 +61,7 @@ type
tweets*: int
likes*: int
media*: int
verifiedType*: VerifiedType
verified*: bool
protected*: bool
suspended*: bool
joinDate*: DateTime
@@ -105,12 +75,12 @@ type
contentType*: VideoType
url*: string
bitrate*: int
resolution*: int
Video* = object
durationMs*: int
url*: string
thumb*: string
views*: string
available*: bool
reason*: string
title*: string
@@ -130,7 +100,7 @@ type
fromUser*: seq[string]
since*: string
until*: string
minLikes*: string
near*: string
sep*: string
Gif* = object
@@ -174,10 +144,8 @@ type
imageDirectMessage = "image_direct_message"
audiospace = "audiospace"
newsletterPublication = "newsletter_publication"
jobDetails = "job_details"
hidden
unknown
Card* = object
kind*: CardKind
url*: string
@@ -191,7 +159,7 @@ type
replies*: int
retweets*: int
likes*: int
views*: int
quotes*: int
Tweet* = ref object
id*: int64
@@ -206,8 +174,6 @@ type
available*: bool
tombstone*: string
location*: string
# Unused, needed for backwards compat
source*: string
stats*: TweetStats
retweet*: Option[Tweet]
attribution*: Option[User]
@@ -219,8 +185,6 @@ type
video*: Option[Video]
photos*: seq[string]
Tweets* = seq[Tweet]
Result*[T] = object
content*: seq[T]
top*, bottom*: string
@@ -228,7 +192,7 @@ type
query*: Query
Chain* = object
content*: Tweets
content*: seq[Tweet]
hasMore*: bool
cursor*: string
@@ -238,7 +202,7 @@ type
after*: Chain
replies*: Result[Chain]
Timeline* = Result[Tweets]
Timeline* = Result[Tweet]
Profile* = object
user*: User
@@ -275,8 +239,6 @@ type
enableDebug*: bool
proxy*: string
proxyAuth*: string
apiProxy*: string
disableTid*: bool
rssCacheTime*: int
listCacheTime*: int
@@ -292,6 +254,3 @@ type
proc contains*(thread: Chain; tweet: Tweet): bool =
thread.content.anyIt(it.id == tweet.id)
proc add*(timeline: var seq[Tweets]; tweet: Tweet) =
timeline.add @[tweet]

View File

@@ -16,8 +16,7 @@ const
"twimg.com",
"abs.twimg.com",
"pbs.twimg.com",
"video.twimg.com",
"x.com"
"video.twimg.com"
]
proc setHmacKey*(key: string) =
@@ -43,12 +42,6 @@ proc getPicUrl*(link: string): string =
else:
&"/pic/{encodeUrl(link)}"
proc getOrigPicUrl*(link: string): string =
if base64Media:
&"/pic/orig/enc/{encode(link, safe=true)}"
else:
&"/pic/orig/{encodeUrl(link)}"
proc filterParams*(params: Table): seq[(string, string)] =
for p in params.pairs():
if p[1].len > 0 and p[0] notin nitterParams:
@@ -58,4 +51,4 @@ proc isTwitterUrl*(uri: Uri): bool =
uri.hostname in twitterDomains
proc isTwitterUrl*(url: string): bool =
isTwitterUrl(parseUri(url))
parseUri(url).hostname in twitterDomains

View File

@@ -11,12 +11,11 @@ const doctype = "<!DOCTYPE html>\n"
proc renderVideoEmbed*(tweet: Tweet; cfg: Config; req: Request): string =
let thumb = get(tweet.video).thumb
let vidUrl = getVideoEmbed(cfg, tweet.id)
let prefs = Prefs(hlsPlayback: true, mp4Playback: true)
let prefs = Prefs(hlsPlayback: true)
let node = buildHtml(html(lang="en")):
renderHead(prefs, cfg, req, video=vidUrl, images=(@[thumb]))
body:
tdiv(class="embed-video"):
renderVideo(get(tweet.video), prefs, "")
tdiv(class="embed-video"):
renderVideo(get(tweet.video), prefs, "")
result = doctype & $node

View File

@@ -30,15 +30,15 @@ proc renderNavbar(cfg: Config; req: Request; rss, canonical: string): VNode =
tdiv(class="nav-item right"):
icon "search", title="Search", href="/search"
if cfg.enableRss and rss.len > 0:
icon "rss", title="RSS Feed", href=rss
icon "bird", title="Open in X", href=canonical
icon "rss-feed", title="RSS Feed", href=rss
icon "bird", title="Open in Twitter", href=canonical
a(href="https://liberapay.com/zedeus"): verbatim lp
icon "info", title="About", href="/about"
icon "cog", title="Preferences", href=("/settings?referer=" & encodeUrl(path))
proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
video=""; images: seq[string] = @[]; banner=""; ogTitle="";
rss=""; alternate=""): VNode =
rss=""; canonical=""): VNode =
var theme = prefs.theme.toTheme
if "theme" in req.params:
theme = req.params["theme"].toTheme
@@ -52,8 +52,8 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
let opensearchUrl = getUrlPrefix(cfg) & "/opensearch"
buildHtml(head):
link(rel="stylesheet", type="text/css", href="/css/style.css?v=22")
link(rel="stylesheet", type="text/css", href="/css/fontello.css?v=4")
link(rel="stylesheet", type="text/css", href="/css/style.css?v=16")
link(rel="stylesheet", type="text/css", href="/css/fontello.css?v=2")
if theme.len > 0:
link(rel="stylesheet", type="text/css", href=(&"/css/themes/{theme}.css"))
@@ -66,14 +66,14 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
link(rel="search", type="application/opensearchdescription+xml", title=cfg.title,
href=opensearchUrl)
if alternate.len > 0:
link(rel="alternate", href=alternate, title="View on X")
if canonical.len > 0:
link(rel="canonical", href=canonical)
if cfg.enableRss and rss.len > 0:
link(rel="alternate", type="application/rss+xml", href=rss, title="RSS feed")
if prefs.hlsPlayback:
script(src="/js/hls.min.js", `defer`="")
script(src="/js/hls.light.min.js", `defer`="")
script(src="/js/hlsPlayback.js", `defer`="")
if prefs.infiniteScroll:
@@ -93,13 +93,14 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
meta(property="og:site_name", content="Nitter")
meta(property="og:locale", content="en_US")
if banner.len > 0 and not banner.startsWith('#'):
if banner.len > 0:
let bannerUrl = getPicUrl(banner)
link(rel="preload", type="image/png", href=bannerUrl, `as`="image")
for url in images:
let preloadUrl = if "400x400" in url: getPicUrl(url)
else: getSmallPic(url)
let suffix = if "400x400" in url or url.endsWith("placeholder.png"): ""
else: "?name=small"
let preloadUrl = getPicUrl(url & suffix)
link(rel="preload", type="image/png", href=preloadUrl, `as`="image")
let image = getUrlPrefix(cfg) & getPicUrl(url)
@@ -119,20 +120,20 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
# this is last so images are also preloaded
# if this is done earlier, Chrome only preloads one image for some reason
link(rel="preload", type="font/woff2", `as`="font",
href="/fonts/fontello.woff2?61663884", crossorigin="anonymous")
href="/fonts/fontello.woff2?21002321", crossorigin="anonymous")
proc renderMain*(body: VNode; req: Request; cfg: Config; prefs=defaultPrefs;
titleText=""; desc=""; ogTitle=""; rss=""; video="";
images: seq[string] = @[]; banner=""): string =
let twitterLink = getTwitterLink(req.path, req.params)
let canonical = getTwitterLink(req.path, req.params)
let node = buildHtml(html(lang="en")):
renderHead(prefs, cfg, req, titleText, desc, video, images, banner, ogTitle,
rss, twitterLink)
rss, canonical)
body:
renderNavbar(cfg, req, rss, twitterLink)
renderNavbar(cfg, req, rss, canonical)
tdiv(class="container"):
body

View File

@@ -50,7 +50,7 @@ proc renderUserCard*(user: User; prefs: Prefs): VNode =
span:
let url = replaceUrls(user.website, prefs)
icon "link"
a(href=url): text url.shortLink
a(href=url): text shortLink(url)
tdiv(class="profile-joindate"):
span(title=getJoinDateFull(user)):
@@ -78,11 +78,8 @@ proc renderPhotoRail(profile: Profile): VNode =
tdiv(class="photo-rail-grid"):
for i, photo in profile.photoRail:
if i == 16: break
let photoSuffix =
if "format" in photo.url or "placeholder" in photo.url: ""
else: ":thumb"
a(href=(&"/{profile.user.username}/status/{photo.tweetId}#m")):
genImg(photo.url & photoSuffix)
genImg(photo.url & (if "format" in photo.url: "" else: ":thumb"))
proc renderBanner(banner: string): VNode =
buildHtml():
@@ -108,7 +105,7 @@ proc renderProfile*(profile: var Profile; prefs: Prefs; path: string): VNode =
renderBanner(profile.user.banner)
let sticky = if prefs.stickyProfile: " sticky" else: ""
tdiv(class=("profile-tab" & sticky)):
tdiv(class=(&"profile-tab{sticky}")):
renderUserCard(profile.user, prefs)
if profile.photoRail.len > 0:
renderPhotoRail(profile)

View File

@@ -1,19 +1,11 @@
# SPDX-License-Identifier: AGPL-3.0-only
import strutils, strformat
import strutils
import karax/[karaxdsl, vdom, vstyles]
import ".."/[types, utils]
const smallWebp* = "?name=small&format=webp"
proc getSmallPic*(url: string): string =
result = url
if "?" notin url and not url.endsWith("placeholder.png"):
result &= smallWebp
result = getPicUrl(result)
proc icon*(icon: string; text=""; title=""; class=""; href=""): VNode =
var c = "icon-" & icon
if class.len > 0: c = &"{c} {class}"
if class.len > 0: c = c & " " & class
buildHtml(tdiv(class="icon-container")):
if href.len > 0:
a(class=c, title=title, href=href)
@@ -23,15 +15,6 @@ proc icon*(icon: string; text=""; title=""; class=""; href=""): VNode =
if text.len > 0:
text " " & text
template verifiedIcon*(user: User): untyped {.dirty.} =
if user.verifiedType != VerifiedType.none:
let lower = ($user.verifiedType).toLowerAscii()
buildHtml(tdiv(class=(&"verified-icon {lower}"))):
icon "circle", class="verified-icon-circle", title=(&"Verified {lower} account")
icon "ok", class="verified-icon-check", title=(&"Verified {lower} account")
else:
text ""
proc linkUser*(user: User, class=""): VNode =
let
isName = "username" notin class
@@ -41,11 +24,11 @@ proc linkUser*(user: User, class=""): VNode =
buildHtml(a(href=href, class=class, title=nameText)):
text nameText
if isName:
verifiedIcon(user)
if user.protected:
text " "
icon "lock", title="Protected account"
if isName and user.verified:
icon "ok", class="verified-icon", title="Verified account"
if isName and user.protected:
text " "
icon "lock", title="Protected account"
proc linkText*(text: string; class=""): VNode =
let url = if "http" notin text: https & text else: text
@@ -68,44 +51,46 @@ proc buttonReferer*(action, text, path: string; class=""; `method`="post"): VNod
proc genCheckbox*(pref, label: string; state: bool): VNode =
buildHtml(label(class="pref-group checkbox-container")):
text label
input(name=pref, `type`="checkbox", checked=state)
if state: input(name=pref, `type`="checkbox", checked="")
else: input(name=pref, `type`="checkbox")
span(class="checkbox")
proc genInput*(pref, label, state, placeholder: string; class=""; autofocus=true): VNode =
proc genInput*(pref, label, state, placeholder: string; class=""): VNode =
let p = placeholder
buildHtml(tdiv(class=("pref-group pref-input " & class))):
if label.len > 0:
label(`for`=pref): text label
input(name=pref, `type`="text", placeholder=p, value=state, autofocus=(autofocus and state.len == 0))
if state.len == 0:
input(name=pref, `type`="text", placeholder=p, value=state, autofocus="")
else:
input(name=pref, `type`="text", placeholder=p, value=state)
proc genSelect*(pref, label, state: string; options: seq[string]): VNode =
buildHtml(tdiv(class="pref-group pref-input")):
label(`for`=pref): text label
select(name=pref):
for opt in options:
option(value=opt, selected=(opt == state)):
text opt
if opt == state:
option(value=opt, selected=""): text opt
else:
option(value=opt): text opt
proc genDate*(pref, state: string): VNode =
buildHtml(span(class="date-input")):
input(name=pref, `type`="date", value=state)
icon "calendar"
proc genNumberInput*(pref, label, state, placeholder: string; class=""; autofocus=true; min="0"): VNode =
let p = placeholder
buildHtml(tdiv(class=("pref-group pref-input " & class))):
if label.len > 0:
label(`for`=pref): text label
input(name=pref, `type`="number", placeholder=p, value=state, autofocus=(autofocus and state.len == 0), min=min, step="1")
proc genImg*(url: string; class=""): VNode =
buildHtml():
img(src=getPicUrl(url), class=class, alt="", loading="lazy")
img(src=getPicUrl(url), class=class, alt="")
proc getTabClass*(query: Query; tab: QueryKind): string =
if query.kind == tab: "tab-item active"
else: "tab-item"
result = "tab-item"
if query.kind == tab:
result &= " active"
proc getAvatarClass*(prefs: Prefs): string =
if prefs.squareAvatars: "avatar"
else: "avatar round"
if prefs.squareAvatars:
"avatar"
else:
"avatar round"

View File

@@ -2,9 +2,6 @@
## SPDX-License-Identifier: AGPL-3.0-only
#import strutils, xmltree, strformat, options, unicode
#import ../types, ../utils, ../formatters, ../prefs
## Snowflake ID cutoff for RSS GUID format transition
## Corresponds to approximately December 14, 2025 UTC
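## (A Snowflake ID stores milliseconds since the Twitter epoch 1288834974657 in its
## upper bits, so (guidCutoff shr 22) + 1288834974657 ms is roughly 2025-12-14 00:29 UTC.)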
#const guidCutoff = 2000000000000000000'i64
#
#proc getTitle(tweet: Tweet; retweet: string): string =
#if tweet.pinned: result = "Pinned: "
@@ -28,25 +25,7 @@
#end proc
#
#proc getDescription(desc: string; cfg: Config): string =
Twitter feed for: ${desc}. Generated by ${getUrlPrefix(cfg)}
#end proc
#
#proc getTweetsWithPinned(profile: Profile): seq[Tweets] =
#result = profile.tweets.content
#if profile.pinned.isSome and result.len > 0:
# let pinnedTweet = profile.pinned.get
# var inserted = false
# for threadIdx in 0 ..< result.len:
# if not inserted:
# for tweetIdx in 0 ..< result[threadIdx].len:
# if result[threadIdx][tweetIdx].id < pinnedTweet.id:
# result[threadIdx].insert(pinnedTweet, tweetIdx)
# inserted = true
# end if
# end for
# end if
# end for
#end if
Twitter feed for: ${desc}. Generated by ${cfg.hostname}
#end proc
#
#proc renderRssTweet(tweet: Tweet; cfg: Config): string =
@@ -54,70 +33,47 @@ Twitter feed for: ${desc}. Generated by ${getUrlPrefix(cfg)}
#let urlPrefix = getUrlPrefix(cfg)
#let text = replaceUrls(tweet.text, defaultPrefs, absolute=urlPrefix)
<p>${text.replace("\n", "<br>\n")}</p>
#if tweet.quote.isSome and get(tweet.quote).available:
# let quoteLink = getLink(get(tweet.quote))
<p><a href="${urlPrefix}${quoteLink}">${cfg.hostname}${quoteLink}</a></p>
#end if
#if tweet.photos.len > 0:
# for photo in tweet.photos:
<img src="${urlPrefix}${getPicUrl(photo)}" style="max-width:250px;" />
# end for
#elif tweet.video.isSome:
<a href="${urlPrefix}${tweet.getLink}">
<br>Video<br>
<img src="${urlPrefix}${getPicUrl(get(tweet.video).thumb)}" style="max-width:250px;" />
</a>
<img src="${urlPrefix}${getPicUrl(get(tweet.video).thumb)}" style="max-width:250px;" />
#elif tweet.gif.isSome:
# let thumb = &"{urlPrefix}{getPicUrl(get(tweet.gif).thumb)}"
# let url = &"{urlPrefix}{getPicUrl(get(tweet.gif).url)}"
<video poster="${thumb}" autoplay muted loop style="max-width:250px;">
<source src="${url}" type="video/mp4"></video>
<source src="${url}" type="video/mp4"</source></video>
#elif tweet.card.isSome:
# let card = tweet.card.get()
# if card.image.len > 0:
<img src="${urlPrefix}${getPicUrl(card.image)}" style="max-width:250px;" />
# end if
#end if
#if tweet.quote.isSome and get(tweet.quote).available:
# let quoteTweet = get(tweet.quote)
# let quoteLink = urlPrefix & getLink(quoteTweet)
<hr/>
<blockquote>
<b>${quoteTweet.user.fullname} (@${quoteTweet.user.username})</b>
<p>
${renderRssTweet(quoteTweet, cfg)}
</p>
<footer>
— <cite><a href="${quoteLink}">${quoteLink}</a>
</footer>
</blockquote>
#end if
#end proc
#
#proc renderRssTweets(tweets: seq[Tweets]; cfg: Config; userId=""): string =
#proc renderRssTweets(tweets: seq[Tweet]; cfg: Config): string =
#let urlPrefix = getUrlPrefix(cfg)
#var links: seq[string]
#for thread in tweets:
# for tweet in thread:
# if userId.len > 0 and tweet.user.id != userId: continue
# end if
#
# let retweet = if tweet.retweet.isSome: tweet.user.username else: ""
# let tweet = if retweet.len > 0: tweet.retweet.get else: tweet
# let link = getLink(tweet)
# if link in links: continue
# end if
# links.add link
# let useGlobalGuid = tweet.id >= guidCutoff
<item>
<title>${getTitle(tweet, retweet)}</title>
<dc:creator>@${tweet.user.username}</dc:creator>
<description><![CDATA[${renderRssTweet(tweet, cfg).strip(chars={'\n'})}]]></description>
<pubDate>${getRfc822Time(tweet)}</pubDate>
#if useGlobalGuid:
<guid isPermaLink="false">${tweet.id}</guid>
#else:
<guid>${urlPrefix & link}</guid>
#end if
<link>${urlPrefix & link}</link>
</item>
# end for
#for t in tweets:
# let retweet = if t.retweet.isSome: t.user.username else: ""
# let tweet = if retweet.len > 0: t.retweet.get else: t
# let link = getLink(tweet)
# if link in links: continue
# end if
# links.add link
<item>
<title>${getTitle(tweet, retweet)}</title>
<dc:creator>@${tweet.user.username}</dc:creator>
<description><![CDATA[${renderRssTweet(tweet, cfg).strip(chars={'\n'})}]]></description>
<pubDate>${getRfc822Time(tweet)}</pubDate>
<guid>${urlPrefix & link}</guid>
<link>${urlPrefix & link}</link>
</item>
#end for
#end proc
#
@@ -145,15 +101,14 @@ ${renderRssTweet(quoteTweet, cfg)}
<width>128</width>
<height>128</height>
</image>
#let tweetsList = getTweetsWithPinned(profile)
#if tweetsList.len > 0:
${renderRssTweets(tweetsList, cfg, userId=profile.user.id)}
#if profile.tweets.content.len > 0:
${renderRssTweets(profile.tweets.content, cfg)}
#end if
</channel>
</rss>
#end proc
#
#proc renderListRss*(tweets: seq[Tweets]; list: List; cfg: Config): string =
#proc renderListRss*(tweets: seq[Tweet]; list: List; cfg: Config): string =
#let link = &"{getUrlPrefix(cfg)}/i/lists/{list.id}"
#result = ""
<?xml version="1.0" encoding="UTF-8"?>
@@ -162,7 +117,7 @@ ${renderRssTweets(tweetsList, cfg, userId=profile.user.id)}
<atom:link href="${link}" rel="self" type="application/rss+xml" />
<title>${xmltree.escape(list.name)} / @${list.username}</title>
<link>${link}</link>
<description>${getDescription(&"{list.name} by @{list.username}", cfg)}</description>
<description>${getDescription(list.name & " by @" & list.username, cfg)}</description>
<language>en-us</language>
<ttl>40</ttl>
${renderRssTweets(tweets, cfg)}
@@ -170,7 +125,7 @@ ${renderRssTweets(tweets, cfg)}
</rss>
#end proc
#
#proc renderSearchRss*(tweets: seq[Tweets]; name, param: string; cfg: Config): string =
#proc renderSearchRss*(tweets: seq[Tweet]; name, param: string; cfg: Config): string =
#let link = &"{getUrlPrefix(cfg)}/search"
#let escName = xmltree.escape(name)
#result = ""
@@ -180,7 +135,7 @@ ${renderRssTweets(tweets, cfg)}
<atom:link href="${link}" rel="self" type="application/rss+xml" />
<title>Search results for "${escName}"</title>
<link>${link}</link>
<description>${getDescription(&"Search \"{escName}\"", cfg)}</description>
<description>${getDescription("Search \"" & escName & "\"", cfg)}</description>
<language>en-us</language>
<ttl>40</ttl>
${renderRssTweets(tweets, cfg)}

View File

@@ -10,21 +10,23 @@ const toggles = {
"media": "Media",
"videos": "Videos",
"news": "News",
"verified": "Verified",
"native_video": "Native videos",
"replies": "Replies",
"links": "Links",
"images": "Images",
"safe": "Safe",
"quote": "Quotes",
"spaces": "Spaces"
"pro_video": "Pro videos"
}.toOrderedTable
proc renderSearch*(): VNode =
buildHtml(tdiv(class="panel-container")):
tdiv(class="search-bar"):
form(`method`="get", action="/search", autocomplete="off"):
hiddenField("f", "tweets")
hiddenField("f", "users")
input(`type`="text", name="q", autofocus="",
placeholder="Search...", dir="auto")
placeholder="Enter username...", dir="auto")
button(`type`="submit"): icon "search"
proc renderProfileTabs*(query: Query; username: string): VNode =
@@ -51,7 +53,7 @@ proc renderSearchTabs*(query: Query): VNode =
proc isPanelOpen(q: Query): bool =
q.fromUser.len == 0 and (q.filters.len > 0 or q.excludes.len > 0 or
@[q.minLikes, q.until, q.since].anyIt(it.len > 0))
@[q.near, q.until, q.since].anyIt(it.len > 0))
proc renderSearchPanel*(query: Query): VNode =
let user = query.fromUser.join(",")
@@ -61,10 +63,12 @@ proc renderSearchPanel*(query: Query): VNode =
hiddenField("f", "tweets")
genInput("q", "", query.text, "Enter search...", class="pref-inline")
button(`type`="submit"): icon "search"
input(id="search-panel-toggle", `type`="checkbox", checked=isPanelOpen(query))
label(`for`="search-panel-toggle"): icon "down"
if isPanelOpen(query):
input(id="search-panel-toggle", `type`="checkbox", checked="")
else:
input(id="search-panel-toggle", `type`="checkbox")
label(`for`="search-panel-toggle"):
icon "down"
tdiv(class="search-panel"):
for f in @["filter", "exclude"]:
span(class="search-title"): text capitalize(f)
@@ -83,10 +87,10 @@ proc renderSearchPanel*(query: Query): VNode =
span(class="search-title"): text "-"
genDate("until", query.until)
tdiv:
span(class="search-title"): text "Minimum likes"
genNumberInput("min_faves", "", query.minLikes, "Number...", autofocus=false)
span(class="search-title"): text "Near"
genInput("near", "", query.near, placeholder="Location...")
proc renderTweetSearch*(results: Timeline; prefs: Prefs; path: string;
proc renderTweetSearch*(results: Result[Tweet]; prefs: Prefs; path: string;
pinned=none(Tweet)): VNode =
let query = results.query
buildHtml(tdiv(class="timeline-container")):

View File

@@ -28,19 +28,14 @@ proc renderReplyThread(thread: Chain; prefs: Prefs; path: string): VNode =
if thread.hasMore:
renderMoreReplies(thread)
proc renderReplies*(replies: Result[Chain]; prefs: Prefs; path: string; tweet: Tweet = nil): VNode =
proc renderReplies*(replies: Result[Chain]; prefs: Prefs; path: string): VNode =
buildHtml(tdiv(class="replies", id="r")):
var hasReplies = false
var replyCount = 0
for thread in replies.content:
if thread.content.len == 0: continue
hasReplies = true
replyCount += thread.content.len
renderReplyThread(thread, prefs, path)
if hasReplies and replies.bottom.len > 0:
if tweet == nil or not replies.beginning or replyCount < tweet.stats.replies:
renderMore(Query(), replies.bottom, focus="#r")
if replies.bottom.len > 0:
renderMore(Query(), replies.bottom, focus="#r")
proc renderConversation*(conv: Conversation; prefs: Prefs; path: string): VNode =
let hasAfter = conv.after.content.len > 0
@@ -75,6 +70,6 @@ proc renderConversation*(conv: Conversation; prefs: Prefs; path: string): VNode
if not conv.replies.beginning:
renderNewer(Query(), getLink(conv.tweet), focus="#r")
if conv.replies.content.len > 0 or conv.replies.bottom.len > 0:
renderReplies(conv.replies, prefs, path, conv.tweet)
renderReplies(conv.replies, prefs, path)
renderToTop(focus="#m")

View File

@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only
import strutils, strformat, algorithm, uri, options
import strutils, strformat, sequtils, algorithm, uri, options
import karax/[karaxdsl, vdom]
import ".."/[types, query, formatters]
@@ -39,24 +39,26 @@ proc renderNoneFound(): VNode =
h2(class="timeline-none"):
text "No items found"
proc renderThread(thread: Tweets; prefs: Prefs; path: string): VNode =
proc renderThread(thread: seq[Tweet]; prefs: Prefs; path: string): VNode =
buildHtml(tdiv(class="thread-line")):
let sortedThread = thread.sortedByIt(it.id)
for i, tweet in sortedThread:
# thread has a gap, display "more replies" link
if i > 0 and tweet.replyId != sortedThread[i - 1].id:
tdiv(class="timeline-item thread more-replies-thread"):
tdiv(class="more-replies"):
a(class="more-replies-text", href=getLink(tweet)):
text "more replies"
let show = i == thread.high and sortedThread[0].id != tweet.threadId
let header = if tweet.pinned or tweet.retweet.isSome: "with-header " else: ""
renderTweet(tweet, prefs, path, class=(header & "thread"),
index=i, last=(i == thread.high), showThread=show)
proc threadFilter(tweets: openArray[Tweet]; threads: openArray[int64]; it: Tweet): seq[Tweet] =
result = @[it]
if it.retweet.isSome or it.replyId in threads: return
for t in tweets:
if t.id == result[0].replyId:
result.insert t
elif t.replyId == result[0].id:
result.add t
proc renderUser(user: User; prefs: Prefs): VNode =
buildHtml(tdiv(class="timeline-item", data-username=user.username)):
buildHtml(tdiv(class="timeline-item")):
a(class="tweet-link", href=("/" & user.username))
tdiv(class="tweet-body profile-result"):
tdiv(class="tweet-header"):
@@ -87,7 +89,7 @@ proc renderTimelineUsers*(results: Result[User]; prefs: Prefs; path=""): VNode =
else:
renderNoMore()
proc renderTimelineTweets*(results: Timeline; prefs: Prefs; path: string;
proc renderTimelineTweets*(results: Result[Tweet]; prefs: Prefs; path: string;
pinned=none(Tweet)): VNode =
buildHtml(tdiv(class="timeline")):
if not results.beginning:
@@ -103,26 +105,26 @@ proc renderTimelineTweets*(results: Timeline; prefs: Prefs; path: string;
else:
renderNoneFound()
else:
var retweets: seq[int64]
var
threads: seq[int64]
retweets: seq[int64]
for thread in results.content:
if thread.len == 1:
let
tweet = thread[0]
retweetId = if tweet.retweet.isSome: get(tweet.retweet).id else: 0
for tweet in results.content:
let rt = if tweet.retweet.isSome: get(tweet.retweet).id else: 0
if retweetId in retweets or tweet.id in retweets or
tweet.pinned and prefs.hidePins:
continue
if tweet.id in threads or rt in retweets or tweet.id in retweets or
tweet.pinned and prefs.hidePins: continue
let thread = results.content.threadFilter(threads, tweet)
if thread.len < 2:
var hasThread = tweet.hasThread
if retweetId != 0 and tweet.retweet.isSome:
retweets &= retweetId
if rt != 0:
retweets &= rt
hasThread = get(tweet.retweet).hasThread
renderTweet(tweet, prefs, path, showThread=hasThread)
else:
renderThread(thread, prefs, path)
threads &= thread.mapIt(it.id)
if results.bottom.len > 0:
renderMore(results.query, results.bottom)
renderMore(results.query, results.bottom)
renderToTop()

View File

@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only
import strutils, sequtils, strformat, options, algorithm
import strutils, sequtils, strformat, options
import karax/[karaxdsl, vdom, vstyles]
from jester import Request
@@ -7,20 +7,27 @@ import renderutils
import ".."/[types, utils, formatters]
import general
const doctype = "<!DOCTYPE html>\n"
proc getSmallPic(url: string): string =
result = url
if "?" notin url and not url.endsWith("placeholder.png"):
result &= ":small"
result = getPicUrl(result)
proc renderMiniAvatar(user: User; prefs: Prefs): VNode =
genImg(user.getUserPic("_mini"), class=(prefs.getAvatarClass & " mini"))
let url = getPicUrl(user.getUserPic("_mini"))
buildHtml():
img(class=(prefs.getAvatarClass & " mini"), src=url)
proc renderHeader(tweet: Tweet; retweet: string; pinned: bool; prefs: Prefs): VNode =
proc renderHeader(tweet: Tweet; retweet: string; prefs: Prefs): VNode =
buildHtml(tdiv):
if pinned:
tdiv(class="pinned"):
span: icon "pin", "Pinned Tweet"
elif retweet.len > 0:
if retweet.len > 0:
tdiv(class="retweet-header"):
span: icon "retweet", retweet & " retweeted"
if tweet.pinned:
tdiv(class="pinned"):
span: icon "pin", "Pinned Tweet"
tdiv(class="tweet-header"):
a(class="tweet-avatar", href=("/" & tweet.user.username)):
var size = "_bigger"
@@ -50,21 +57,19 @@ proc renderAlbum(tweet: Tweet): VNode =
tdiv(class="attachment image"):
let
named = "name=" in photo
small = if named: photo else: photo & smallWebp
a(href=getOrigPicUrl(photo), class="still-image", target="_blank"):
orig = if named: photo else: photo & "?name=orig"
small = if named: photo else: photo & "?name=small"
a(href=getPicUrl(orig), class="still-image", target="_blank"):
genImg(small)
proc isPlaybackEnabled(prefs: Prefs; playbackType: VideoType): bool =
case playbackType
proc isPlaybackEnabled(prefs: Prefs; video: Video): bool =
case video.playbackType
of mp4: prefs.mp4Playback
of m3u8, vmap: prefs.hlsPlayback
proc hasMp4Url(video: Video): bool =
video.variants.anyIt(it.contentType == mp4)
proc renderVideoDisabled(playbackType: VideoType; path: string): VNode =
proc renderVideoDisabled(video: Video; path: string): VNode =
buildHtml(tdiv(class="video-overlay")):
case playbackType
case video.playbackType
of mp4:
p: text "mp4 playback disabled in preferences"
of m3u8, vmap:
@@ -79,37 +84,35 @@ proc renderVideoUnavailable(video: Video): VNode =
p: text "This media is unavailable"
proc renderVideo*(video: Video; prefs: Prefs; path: string): VNode =
let
container = if video.description.len == 0 and video.title.len == 0: ""
else: " card-container"
playbackType = if not prefs.proxyVideos and video.hasMp4Url: mp4
else: video.playbackType
let container =
if video.description.len > 0 or video.title.len > 0: " card-container"
else: ""
buildHtml(tdiv(class="attachments card")):
tdiv(class="gallery-video" & container):
tdiv(class="attachment video-container"):
let thumb = getSmallPic(video.thumb)
if not video.available:
img(src=thumb, loading="lazy")
img(src=thumb)
renderVideoUnavailable(video)
elif not prefs.isPlaybackEnabled(playbackType):
img(src=thumb, loading="lazy")
renderVideoDisabled(playbackType, path)
elif not prefs.isPlaybackEnabled(video):
img(src=thumb)
renderVideoDisabled(video, path)
else:
let
vars = video.variants.filterIt(it.contentType == playbackType)
vidUrl = vars.sortedByIt(it.resolution)[^1].url
source = if prefs.proxyVideos: getVidUrl(vidUrl)
else: vidUrl
case playbackType
let vid = video.variants.filterIt(it.contentType == video.playbackType)
let source = getVidUrl(vid[0].url)
case video.playbackType
of mp4:
video(poster=thumb, controls="", muted=prefs.muteVideos):
source(src=source, `type`="video/mp4")
if prefs.muteVideos:
video(poster=thumb, controls="", muted=""):
source(src=source, `type`="video/mp4")
else:
video(poster=thumb, controls=""):
source(src=source, `type`="video/mp4")
of m3u8, vmap:
video(poster=thumb, data-url=source, data-autoload="false", muted=prefs.muteVideos)
video(poster=thumb, data-url=source, data-autoload="false")
verbatim "<div class=\"video-overlay\" onclick=\"playVideo(this)\">"
tdiv(class="overlay-circle"): span(class="overlay-triangle")
tdiv(class="overlay-duration"): text getDuration(video)
verbatim "</div>"
if container.len > 0:
tdiv(class="card-content"):
@@ -121,9 +124,14 @@ proc renderGif(gif: Gif; prefs: Prefs): VNode =
buildHtml(tdiv(class="attachments media-gif")):
tdiv(class="gallery-gif", style={maxHeight: "unset"}):
tdiv(class="attachment"):
video(class="gif", poster=getSmallPic(gif.thumb), autoplay=prefs.autoplayGifs,
controls="", muted="", loop=""):
source(src=getPicUrl(gif.url), `type`="video/mp4")
let thumb = getSmallPic(gif.thumb)
let url = getPicUrl(gif.url)
if prefs.autoplayGifs:
video(class="gif", poster=thumb, controls="", autoplay="", muted="", loop=""):
source(src=url, `type`="video/mp4")
else:
video(class="gif", poster=thumb, controls="", muted="", loop=""):
source(src=url, `type`="video/mp4")
proc renderPoll(poll: Poll): VNode =
buildHtml(tdiv(class="poll")):
@@ -138,12 +146,12 @@ proc renderPoll(poll: Poll): VNode =
span(class="poll-choice-value"): text percStr
span(class="poll-choice-option"): text poll.options[i]
span(class="poll-info"):
text &"{insertSep($poll.votes, ',')} votes • {poll.status}"
text insertSep($poll.votes, ',') & " votes • " & poll.status
proc renderCardImage(card: Card): VNode =
buildHtml(tdiv(class="card-image-container")):
tdiv(class="card-image"):
genImg(card.image)
img(src=getPicUrl(card.image), alt="")
if card.kind == player:
tdiv(class="card-overlay"):
tdiv(class="overlay-circle"):
@@ -179,12 +187,14 @@ func formatStat(stat: int): string =
if stat > 0: insertSep($stat, ',')
else: ""
proc renderStats(stats: TweetStats): VNode =
proc renderStats(stats: TweetStats; views: string): VNode =
buildHtml(tdiv(class="tweet-stats")):
span(class="tweet-stat"): icon "comment", formatStat(stats.replies)
span(class="tweet-stat"): icon "retweet", formatStat(stats.retweets)
span(class="tweet-stat"): icon "quote", formatStat(stats.quotes)
span(class="tweet-stat"): icon "heart", formatStat(stats.likes)
span(class="tweet-stat"): icon "views", formatStat(stats.views)
if views.len > 0:
span(class="tweet-stat"): icon "play", insertSep(views, ',')
proc renderReply(tweet: Tweet): VNode =
buildHtml(tdiv(class="replying-to")):
@@ -197,7 +207,8 @@ proc renderAttribution(user: User; prefs: Prefs): VNode =
buildHtml(a(class="attribution", href=("/" & user.username))):
renderMiniAvatar(user, prefs)
strong: text user.fullname
verifiedIcon(user)
if user.verified:
icon "ok", class="verified-icon", title="Verified account"
proc renderMediaTags(tags: seq[User]): VNode =
buildHtml(tdiv(class="media-tag-block")):
@@ -273,7 +284,7 @@ proc renderTweet*(tweet: Tweet; prefs: Prefs; path: string; class=""; index=0;
divClass = "thread-last " & class
if not tweet.available:
return buildHtml(tdiv(class=divClass & "unavailable timeline-item", data-username=tweet.user.username)):
return buildHtml(tdiv(class=divClass & "unavailable timeline-item")):
tdiv(class="unavailable-box"):
if tweet.tombstone.len > 0:
text tweet.tombstone
@@ -285,22 +296,20 @@ proc renderTweet*(tweet: Tweet; prefs: Prefs; path: string; class=""; index=0;
if tweet.quote.isSome:
renderQuote(tweet.quote.get(), prefs, path)
let
fullTweet = tweet
pinned = tweet.pinned
let fullTweet = tweet
var retweet: string
var tweet = fullTweet
if tweet.retweet.isSome:
tweet = tweet.retweet.get
retweet = fullTweet.user.fullname
buildHtml(tdiv(class=("timeline-item " & divClass), data-username=tweet.user.username)):
buildHtml(tdiv(class=("timeline-item " & divClass))):
if not mainTweet:
a(class="tweet-link", href=getLink(tweet))
tdiv(class="tweet-body"):
renderHeader(tweet, retweet, pinned, prefs)
var views = ""
renderHeader(tweet, retweet, prefs)
if not afterTweet and index == 0 and tweet.reply.len > 0 and
(tweet.reply.len > 1 or tweet.reply[0] != tweet.user.username):
@@ -316,15 +325,17 @@ proc renderTweet*(tweet: Tweet; prefs: Prefs; path: string; class=""; index=0;
if tweet.attribution.isSome:
renderAttribution(tweet.attribution.get(), prefs)
if tweet.card.isSome and tweet.card.get().kind != hidden:
if tweet.card.isSome:
renderCard(tweet.card.get(), prefs, path)
if tweet.photos.len > 0:
renderAlbum(tweet)
elif tweet.video.isSome:
renderVideo(tweet.video.get(), prefs, path)
views = tweet.video.get().views
elif tweet.gif.isSome:
renderGif(tweet.gif.get(), prefs)
views = "GIF"
if tweet.poll.isSome:
renderPoll(tweet.poll.get())
@@ -333,24 +344,19 @@ proc renderTweet*(tweet: Tweet; prefs: Prefs; path: string; class=""; index=0;
renderQuote(tweet.quote.get(), prefs, path)
if mainTweet:
p(class="tweet-published"): text &"{getTime(tweet)}"
p(class="tweet-published"): text getTime(tweet)
if tweet.mediaTags.len > 0:
renderMediaTags(tweet.mediaTags)
if not prefs.hideTweetStats:
renderStats(tweet.stats)
renderStats(tweet.stats, views)
if showThread:
a(class="show-thread", href=("/i/status/" & $tweet.threadId)):
text "Show this thread"
proc renderTweetEmbed*(tweet: Tweet; path: string; prefs: Prefs; cfg: Config; req: Request): string =
let node = buildHtml(html(lang="en")):
proc renderTweetEmbed*(tweet: Tweet; path: string; prefs: Prefs; cfg: Config; req: Request): VNode =
buildHtml(tdiv(class="tweet-embed")):
renderHead(prefs, cfg, req)
body:
tdiv(class="tweet-embed"):
renderTweet(tweet, prefs, path, mainTweet=true)
result = doctype & $node
renderTweet(tweet, prefs, path, mainTweet=true)

View File

@@ -1 +0,0 @@
seleniumbase

View File

@@ -3,6 +3,11 @@ from parameterized import parameterized
card = [
['Thom_Wolf/status/1122466524860702729',
'pytorch/fairseq',
'Facebook AI Research Sequence-to-Sequence Toolkit written in Python. - GitHub - pytorch/fairseq: Facebook AI Research Sequence-to-Sequence Toolkit written in Python.',
'github.com', True],
['nim_lang/status/1136652293510717440',
'Version 0.20.0 released',
'We are very proud to announce Nim version 0.20. This is a massive release, both literally and figuratively. It contains more than 1,000 commits and it marks our release candidate for version 1.0!',
@@ -11,29 +16,39 @@ card = [
['voidtarget/status/1094632512926605312',
'Basic OBS Studio plugin, written in nim, supporting C++ (C fine too)',
'Basic OBS Studio plugin, written in nim, supporting C++ (C fine too) - obsplugin.nim',
'gist.github.com', True]
'gist.github.com', True],
['FluentAI/status/1116417904831029248',
'Amazon’s Alexa isn’t just AI — thousands of humans are listening',
'One of the only ways to improve Alexa is to have human beings check it for errors',
'theverge.com', True]
]
no_thumb = [
['FluentAI/status/1116417904831029248',
'LinkedIn',
'This link will take you to a page that’s not on LinkedIn',
'lnkd.in'],
['Thom_Wolf/status/1122466524860702729',
'GitHub - facebookresearch/fairseq: Facebook AI Research Sequence-to-Sequence Toolkit written in',
'',
'github.com'],
['Bountysource/status/1141879700639215617',
'Post a bounty on kivy/plyer!',
'Automation and Screen Reader Support',
'bountysource.com'],
['brent_p/status/1088857328680488961',
'GitHub - brentp/hts-nim: nim wrapper for htslib for parsing genomics data files',
'',
'github.com'],
'Hts Nim Sugar',
'hts-nim is a library that allows one to use htslib via the nim programming language. Nim is a garbage-collected language that compiles to C and often has similar performance. I have become very...',
'brentp.github.io'],
['voidtarget/status/1133028231672582145',
'sinkingsugar/nimqt-example',
'A sample of a Qt app written using mostly nim. Contribute to sinkingsugar/nimqt-example development by creating an account on GitHub.',
'github.com']
'github.com'],
['mobile_test/status/490378953744318464',
'Nantasket Beach',
'Explore this photo titled Nantasket Beach by Ben Sandofsky (@sandofsky) on 500px',
'500px.com'],
['nim_lang/status/1082989146040340480',
'Nim in 2018: A short recap',
'Posted in r/programming by u/miran1',
'reddit.com']
]
playable = [
@@ -48,6 +63,17 @@ playable = [
'youtube.com']
]
# promo = [
# ['BangOlufsen/status/1145698701517754368',
# 'Upgrade your journey', '',
# 'www.bang-olufsen.com'],
# ['BangOlufsen/status/1154934429900406784',
# 'Learn more about Beosound Shape', '',
# 'www.bang-olufsen.com']
# ]
class CardTest(BaseTestCase):
@parameterized.expand(card)
def test_card(self, tweet, title, description, destination, large):
@@ -55,7 +81,7 @@ class CardTest(BaseTestCase):
c = Card(Conversation.main + " ")
self.assert_text(title, c.title)
self.assert_text(destination, c.destination)
self.assertIn('/pic/', self.get_image_url(c.image + ' img'))
self.assertIn('_img', self.get_image_url(c.image + ' img'))
if len(description) > 0:
self.assert_text(description, c.description)
if large:
@@ -78,7 +104,17 @@ class CardTest(BaseTestCase):
c = Card(Conversation.main + " ")
self.assert_text(title, c.title)
self.assert_text(destination, c.destination)
self.assertIn('/pic/', self.get_image_url(c.image + ' img'))
self.assertIn('_img', self.get_image_url(c.image + ' img'))
self.assert_element_visible('.card-overlay')
if len(description) > 0:
self.assert_text(description, c.description)
# @parameterized.expand(promo)
# def test_card_promo(self, tweet, title, description, destination):
# self.open_nitter(tweet)
# c = Card(Conversation.main + " ")
# self.assert_text(title, c.title)
# self.assert_text(destination, c.destination)
# self.assert_element_visible('.video-overlay')
# if len(description) > 0:
# self.assert_text(description, c.description)
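For reference, these card tests follow the usual seleniumbase + parameterized pattern; a stripped-down, hypothetical example (the .card-title selector and the localhost:8080 base URL are assumptions, not taken from the suite):

    from parameterized import parameterized
    from seleniumbase import BaseCase

    cards = [
        ("nim_lang/status/1136652293510717440", "Version 0.20.0 released"),
    ]

    class CardSmokeTest(BaseCase):
        @parameterized.expand(cards)
        def test_card_title(self, tweet, title):
            # Assumes a local Nitter instance is listening on port 8080.
            self.open(f"http://localhost:8080/{tweet}")
            self.assert_text(title, ".card-title")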

View File

@@ -4,7 +4,7 @@ from parameterized import parameterized
profiles = [
['mobile_test', 'Test account',
'Test Account. test test Testing username with @mobile_test_2 and a #hashtag',
'San Francisco, CA', 'example.com/foobar', 'Joined October 2009', '97'],
'San Francisco, CA', 'example.com/foobar', 'Joined October 2009', '100'],
['mobile_test_2', 'mobile test 2', '', '', '', 'Joined January 2011', '13']
]
@@ -15,18 +15,11 @@ protected = [
['Poop', 'Randy', 'Social media fanatic.']
]
invalid = [['thisprofiledoesntexist']]
invalid = [['thisprofiledoesntexist'], ['%']]
malformed = [
['${userId}'],
['$%7BuserId%7D'], # URL encoded version
['%'], # Percent sign is invalid
['user@name'],
['user.name'],
['user-name'],
['user$name'],
['user{name}'],
['user name'], # space
banner_color = [
['nim_lang', '22, 25, 32'],
['rustlang', '35, 31, 32']
]
banner_image = [
@@ -77,16 +70,15 @@ class ProfileTest(BaseTestCase):
self.open_nitter(username)
self.assert_text(f'User "{username}" not found')
@parameterized.expand(malformed)
def test_malformed_username(self, username):
"""Test that malformed usernames (with invalid characters) return 404"""
self.open_nitter(username)
# Malformed usernames should return a 404 page, not trigger a fetch from Twitter
self.assert_text('Page not found')
def test_suspended(self):
self.open_nitter('suspendme')
self.assert_text('User "suspendme" has been suspended')
self.open_nitter('user')
self.assert_text('User "user" has been suspended')
@parameterized.expand(banner_color)
def test_banner_color(self, username, color):
self.open_nitter(username)
banner = self.find_element(Profile.banner + ' a')
self.assertIn(color, banner.value_of_css_property('background-color'))
@parameterized.expand(banner_image)
def test_banner_image(self, username, url):
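Aside (not from the diff): the malformed-username cases listed in this hunk all fall outside Twitter's handle alphabet, so a simple pre-check can reject them with a local 404 before any upstream request. A hypothetical validator sketch:

    import re

    # Twitter handles are 1-15 characters from [A-Za-z0-9_]; anything else can
    # be rejected locally instead of being forwarded to the API.
    VALID_USERNAME = re.compile(r"^[A-Za-z0-9_]{1,15}$")

    for name in ("${userId}", "user name", "user-name", "nim_lang"):
        print(name, bool(VALID_USERNAME.match(name)))
    # ${userId} False / user name False / user-name False / nim_lang True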

View File

@@ -2,13 +2,15 @@ from base import BaseTestCase, Quote, Conversation
from parameterized import parameterized
text = [
['nim_lang/status/1491461266849808397#m',
'Nim', '@nim_lang',
"""What's better than Nim 1.6.0?
['elonmusk/status/1138136540096319488',
'Tesla Owners Online', '@Model3Owners',
"""As of March 58.4% of new car sales in Norway are electric.
Nim 1.6.2 :)
What are we doing wrong? reuters.com/article/us-norwa…"""],
nim-lang.org/blog/2021/12/17…"""]
['nim_lang/status/924694255364341760',
'Hacker News', '@newsycombinator',
'Why Rust fails hard at scientific computing andre-ratsimbazafy.com/why-r…']
]
image = [

View File

@@ -2,8 +2,8 @@ from base import BaseTestCase
from parameterized import parameterized
#class SearchTest(BaseTestCase):
#@parameterized.expand([['@mobile_test'], ['@mobile_test_2']])
#def test_username_search(self, username):
#self.search_username(username)
#self.assert_text(f'{username}')
class SearchTest(BaseTestCase):
@parameterized.expand([['@mobile_test'], ['@mobile_test_2']])
def test_username_search(self, username):
self.search_username(username)
self.assert_text(f'{username}')

Some files were not shown because too many files have changed in this diff.