1
0
mirror of https://github.com/zedeus/nitter.git synced 2026-05-11 14:59:42 -04:00

1 Commits

Author SHA1 Message Date
Zed
c9b261a793 WIP tweets/timeline parser 2022-01-30 23:38:39 +01:00
125 changed files with 3172 additions and 8876 deletions

View File

@@ -1,4 +1,4 @@
name: Docker name: CI/CD
on: on:
push: push:
@@ -8,54 +8,31 @@ on:
- master - master
jobs: jobs:
tests: build-docker:
uses: ./.github/workflows/run-tests.yml runs-on: ubuntu-latest
secrets: inherit
build-docker-amd64:
needs: [tests]
runs-on: ubuntu-24.04
steps: steps:
- uses: actions/checkout@v6 - uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
with:
platforms: all
- name: Set up Docker Buildx - name: Set up Docker Buildx
id: buildx id: buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v1
with: with:
version: latest version: latest
- name: Login to DockerHub - name: Login to DockerHub
uses: docker/login-action@v3 uses: docker/login-action@v1
with: with:
username: ${{ secrets.DOCKER_USERNAME }} username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }} password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push AMD64 Docker image - name: Build and push
uses: docker/build-push-action@v3 uses: docker/build-push-action@v2
with: with:
context: . context: .
file: ./Dockerfile file: ./Dockerfile
platforms: linux/amd64 platforms: linux/amd64
push: true push: true
tags: zedeus/nitter:latest,zedeus/nitter:${{ github.sha }} tags: zedeus/nitter:latest,zedeus/nitter:${{ github.sha }}
build-docker-arm64:
needs: [tests]
runs-on: ubuntu-24.04-arm
steps:
- uses: actions/checkout@v6
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3
with:
version: latest
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push ARM64 Docker image
uses: docker/build-push-action@v3
with:
context: .
file: ./Dockerfile.arm64
platforms: linux/arm64
push: true
tags: zedeus/nitter:latest-arm64,zedeus/nitter:${{ github.sha }}-arm64

View File

@@ -1,145 +0,0 @@
name: Tests
on:
push:
paths-ignore:
- "*.md"
branches-ignore:
- master
workflow_call:
# Ensure that multiple runs on the same branch do not overlap.
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
jobs:
build-test:
name: Build and test
runs-on: ubuntu-24.04
strategy:
matrix:
nim: ["2.0.x", "2.2.x", "devel"]
steps:
- name: Checkout Code
uses: actions/checkout@v6
- name: Cache Nimble Dependencies
id: cache-nimble
uses: actions/cache@v5
with:
path: ~/.nimble
key: ${{ matrix.nim }}-nimble-v2-${{ hashFiles('*.nimble') }}
restore-keys: |
${{ matrix.nim }}-nimble-v2-
- name: Setup Nim
uses: jiro4989/setup-nim-action@v2
with:
nim-version: ${{ matrix.nim }}
use-nightlies: true
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Build Project
run: nimble build -Y
- name: Upload 2.2.x build artifact
if: matrix.nim == '2.2.x'
uses: actions/upload-artifact@v6
with:
name: nitter-linux-nim-2.2.x-${{ github.sha }}
path: |
./nitter
if-no-files-found: error
integration-test:
needs: [build-test]
name: Integration test
runs-on: ubuntu-24.04
services:
redis:
image: redis:7
ports:
- 6379:6379
steps:
- name: Install runtime deps
run: |
sudo apt-get install -y --no-install-recommends libsass-dev libpcre3
- name: Checkout code
uses: actions/checkout@v6
- name: Cache pipx (poetry)
uses: actions/cache@v5
with:
path: |
~/.local/pipx
~/.local/bin
key: pipx-poetry-${{ runner.os }}
- name: Install poetry
env:
PIPX_HOME: ~/.local/pipx
PIPX_BIN_DIR: ~/.local/bin
run: command -v poetry >/dev/null 2>&1 || pipx install poetry
- name: Setup Python (3.14) with Poetry cache
uses: actions/setup-python@v6
with:
python-version: "3.14"
cache: poetry
cache-dependency-path: tests/poetry.lock
- name: Install Python deps
working-directory: tests
run: poetry sync
- name: Cache Nimble Dependencies
uses: actions/cache@v5
with:
path: ~/.nimble
key: 2.2.x-nimble-v2-${{ hashFiles('*.nimble') }}
restore-keys: |
2.2.x-nimble-v2-
- name: Setup Nim
uses: jiro4989/setup-nim-action@v2
with:
nim-version: 2.2.x
use-nightlies: true
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Download 2.2.x build artifact
uses: actions/download-artifact@v4
with:
name: nitter-linux-nim-2.2.x-${{ github.sha }}
path: .
- name: Make nitter binary executable
run: chmod +x ./nitter
- name: Prepare Nitter Environment
run: |
cp nitter.example.conf nitter.conf
sed -i 's/enableDebug = false/enableDebug = true/g' nitter.conf
sed -i 's/maxRetries = 1/maxRetries = 10/g' nitter.conf
# Run both Nimble tasks concurrently
nim r tools/rendermd.nim &
nim r tools/gencss.nim &
wait
echo '${{ secrets.SESSIONS }}' | head -n1
echo '${{ secrets.SESSIONS }}' > ./sessions.jsonl
- name: Run Tests
run: |
./nitter &
cd tests
poetry run pytest -n3 --reruns=5 --rs .

8
.gitignore vendored
View File

@@ -3,15 +3,9 @@ nitter
*.db *.db
/tests/__pycache__ /tests/__pycache__
/tests/geckodriver.log /tests/geckodriver.log
/tests/downloaded_files /tests/downloaded_files/*
/tests/latest_logs
/tools/gencss /tools/gencss
/tools/rendermd /tools/rendermd
/public/css/style.css /public/css/style.css
/public/md/*.html /public/md/*.html
nitter.conf nitter.conf
guest_accounts.json*
sessions.json*
dump.rdb
*.bak
/tools/*.json*

View File

@@ -1,5 +1,6 @@
FROM nimlang/nim:2.2.0-alpine-regular as nim FROM nimlang/nim:1.6.2-alpine-regular as nim
LABEL maintainer="setenforce@protonmail.com" LABEL maintainer="setenforce@protonmail.com"
EXPOSE 8080
RUN apk --no-cache add libsass-dev pcre RUN apk --no-cache add libsass-dev pcre
@@ -9,17 +10,14 @@ COPY nitter.nimble .
RUN nimble install -y --depsOnly RUN nimble install -y --depsOnly
COPY . . COPY . .
RUN nimble build -d:danger -d:lto -d:strip --mm:refc \ RUN nimble build -d:danger -d:lto -d:strip \
&& nimble scss \ && nimble scss \
&& nimble md && nimble md
FROM alpine:latest FROM alpine:latest
WORKDIR /src/ WORKDIR /src/
RUN apk --no-cache add pcre ca-certificates RUN apk --no-cache add pcre
COPY --from=nim /src/nitter/nitter ./ COPY --from=nim /src/nitter/nitter ./
COPY --from=nim /src/nitter/nitter.example.conf ./nitter.conf COPY --from=nim /src/nitter/nitter.example.conf ./nitter.conf
COPY --from=nim /src/nitter/public ./public COPY --from=nim /src/nitter/public ./public
EXPOSE 8080
RUN adduser -h /src/ -D -s /bin/sh nitter
USER nitter
CMD ./nitter CMD ./nitter

View File

@@ -1,25 +0,0 @@
FROM alpine:3.20.6 as nim
LABEL maintainer="setenforce@protonmail.com"
RUN apk --no-cache add libsass-dev pcre gcc git libc-dev nim nimble
WORKDIR /src/nitter
COPY nitter.nimble .
RUN nimble install -y --depsOnly
COPY . .
RUN nimble build -d:danger -d:lto -d:strip --mm:refc \
&& nimble scss \
&& nimble md
FROM alpine:3.20.6
WORKDIR /src/
RUN apk --no-cache add pcre ca-certificates openssl
COPY --from=nim /src/nitter/nitter ./
COPY --from=nim /src/nitter/nitter.example.conf ./nitter.conf
COPY --from=nim /src/nitter/public ./public
EXPOSE 8080
RUN adduser -h /src/ -D -s /bin/sh nitter
USER nitter
CMD ./nitter

View File

@@ -1,38 +1,29 @@
# Nitter # Nitter
[![Test Matrix](https://github.com/zedeus/nitter/workflows/Tests/badge.svg)](https://github.com/zedeus/nitter/actions/workflows/run-tests.yml) [![Test Matrix](https://github.com/zedeus/nitter/workflows/CI/CD/badge.svg)](https://github.com/zedeus/nitter/actions?query=workflow%3ACI/CD)
[![Test Matrix](https://github.com/zedeus/nitter/workflows/Docker/badge.svg)](https://github.com/zedeus/nitter/actions/workflows/build-docker.yml)
[![License](https://img.shields.io/github/license/zedeus/nitter?style=flat)](#license) [![License](https://img.shields.io/github/license/zedeus/nitter?style=flat)](#license)
> [!NOTE]
> Running a Nitter instance now requires real accounts, since Twitter removed the previous methods. \
> This does not affect users. \
> For instructions on how to obtain session tokens, see [Creating session tokens](https://github.com/zedeus/nitter/wiki/Creating-session-tokens).
A free and open source alternative Twitter front-end focused on privacy and A free and open source alternative Twitter front-end focused on privacy and
performance. \ performance. \
Inspired by the [Invidious](https://github.com/iv-org/invidious) project. Inspired by the [Invidious](https://github.com/iv-org/invidious)
project.
- No JavaScript or ads - No JavaScript or ads
- All requests go through the backend, client never talks to Twitter - All requests go through the backend, client never talks to Twitter
- Prevents Twitter from tracking your IP or JavaScript fingerprint - Prevents Twitter from tracking your IP or JavaScript fingerprint
- Uses Twitter's unofficial API (no developer account required) - Uses Twitter's unofficial API (no rate limits or developer account required)
- Lightweight (for [@nim_lang](https://nitter.net/nim_lang), 60KB vs 784KB from twitter.com) - Lightweight (for [@nim_lang](https://nitter.net/nim_lang), 60KB vs 784KB from twitter.com)
- RSS feeds - RSS feeds
- Themes - Themes
- Mobile support (responsive design) - Mobile support (responsive design)
- AGPLv3 licensed, no proprietary instances permitted - AGPLv3 licensed, no proprietary instances permitted
<details> Liberapay: https://liberapay.com/zedeus \
<summary>Donations</summary> Patreon: https://patreon.com/nitter \
Liberapay: https://liberapay.com/zedeus<br> BTC: bc1qp7q4qz0fgfvftm5hwz3vy284nue6jedt44kxya \
Patreon: https://patreon.com/nitter<br> ETH: 0x66d84bc3fd031b62857ad18c62f1ba072b011925 \
BTC: bc1qpqpzjkcpgluhzf7x9yqe7jfe8gpfm5v08mdr55<br> LTC: ltc1qhsz5nxw6jw9rdtw9qssjeq2h8hqk2f85rdgpkr \
ETH: 0x24a0DB59A923B588c7A5EBd0dBDFDD1bCe9c4460<br> XMR: 42hKayRoEAw4D6G6t8mQHPJHQcXqofjFuVfavqKeNMNUZfeJLJAcNU19i1bGdDvcdN6romiSscWGWJCczFLe9RFhM3d1zpL
XMR: 42hKayRoEAw4D6G6t8mQHPJHQcXqofjFuVfavqKeNMNUZfeJLJAcNU19i1bGdDvcdN6romiSscWGWJCczFLe9RFhM3d1zpL<br>
SOL: ANsyGNXFo6osuFwr1YnUqif2RdoYRhc27WdyQNmmETSW<br>
ZEC: u1vndfqtzyy6qkzhkapxelel7ams38wmfeccu3fdpy2wkuc4erxyjm8ncjhnyg747x6t0kf0faqhh2hxyplgaum08d2wnj4n7cyu9s6zhxkqw2aef4hgd4s6vh5hpqvfken98rg80kgtgn64ff70djy7s8f839z00hwhuzlcggvefhdlyszkvwy3c7yw623vw3rvar6q6evd3xcvveypt
</details>
## Roadmap ## Roadmap
@@ -43,20 +34,19 @@ ZEC: u1vndfqtzyy6qkzhkapxelel7ams38wmfeccu3fdpy2wkuc4erxyjm8ncjhnyg747x6t0kf0faq
## Resources ## Resources
The wiki contains The wiki contains
[a list of instances](https://github.com/zedeus/nitter/wiki/Instances) and [a list of instances](https://github.com/zedeus/nitter/wiki/Instances) and
[browser extensions](https://github.com/zedeus/nitter/wiki/Extensions) [browser extensions](https://github.com/zedeus/nitter/wiki/Extensions)
maintained by the community. maintained by the community.
## Why? ## Why?
It's impossible to use Twitter without JavaScript enabled, and as of 2024 you It's impossible to use Twitter without JavaScript enabled. For privacy-minded
need to sign up. For privacy-minded folks, preventing JavaScript analytics and folks, preventing JavaScript analytics and IP-based tracking is important, but
IP-based tracking is important, but apart from using a VPN and uBlock/uMatrix, apart from using a VPN and uBlock/uMatrix, it's impossible. Despite being behind
it's impossible. Despite being behind a VPN and using heavy-duty adblockers, a VPN and using heavy-duty adblockers, you can get accurately tracked with your
you can get accurately tracked with your [browser's [browser's fingerprint](https://restoreprivacy.com/browser-fingerprinting/),
fingerprint](https://restoreprivacy.com/browser-fingerprinting/), [no [no JavaScript required](https://noscriptfingerprint.com/). This all became
JavaScript required](https://noscriptfingerprint.com/). This all became
particularly important after Twitter [removed the particularly important after Twitter [removed the
ability](https://www.eff.org/deeplinks/2020/04/twitter-removes-privacy-option-and-shows-why-we-need-strong-privacy-laws) ability](https://www.eff.org/deeplinks/2020/04/twitter-removes-privacy-option-and-shows-why-we-need-strong-privacy-laws)
for users to control whether their data gets sent to advertisers. for users to control whether their data gets sent to advertisers.
@@ -77,24 +67,21 @@ Twitter account.
## Installation ## Installation
### Dependencies ### Dependencies
* libpcre
- libpcre * libsass
- libsass * redis
- redis/valkey
To compile Nitter you need a Nim installation, see To compile Nitter you need a Nim installation, see
[nim-lang.org](https://nim-lang.org/install.html) for details. It is possible [nim-lang.org](https://nim-lang.org/install.html) for details. It is possible to
to install it system-wide or in the user directory you create below. install it system-wide or in the user directory you create below.
To compile the scss files, you need to install `libsass`. On Ubuntu and Debian, To compile the scss files, you need to install `libsass`. On Ubuntu and Debian,
you can use `libsass-dev`. you can use `libsass-dev`.
Redis is required for caching and in the future for account info. As of 2024 Redis is required for caching and in the future for account info. It should be
Redis is no longer open source, so using the fork Valkey is recommended. It available on most distros as `redis` or `redis-server` (Ubuntu/Debian).
should be available on most distros as `redis` or `redis-server` Running it with the default config is fine, Nitter's default config is set to
(Ubuntu/Debian), or `valkey`/`valkey-server`. Running it with the default use the default Redis port and localhost.
config is fine, Nitter's default config is set to use the default port and
localhost.
Here's how to create a `nitter` user, clone the repo, and build the project Here's how to create a `nitter` user, clone the repo, and build the project
along with the scss and md files. along with the scss and md files.
@@ -104,7 +91,7 @@ along with the scss and md files.
# su nitter # su nitter
$ git clone https://github.com/zedeus/nitter $ git clone https://github.com/zedeus/nitter
$ cd nitter $ cd nitter
$ nimble build -d:danger --mm:refc $ nimble build -d:release
$ nimble scss $ nimble scss
$ nimble md $ nimble md
$ cp nitter.example.conf nitter.conf $ cp nitter.example.conf nitter.conf
@@ -121,32 +108,25 @@ performance reasons.
### Docker ### Docker
Page for the Docker image: https://hub.docker.com/r/zedeus/nitter #### NOTE: For ARM64/ARM support, please use [unixfox's image](https://quay.io/repository/unixfox/nitter?tab=tags), more info [here](https://github.com/zedeus/nitter/issues/399#issuecomment-997263495)
#### NOTE: For ARM64 support, please use the separate ARM64 docker image: [`zedeus/nitter:latest-arm64`](https://hub.docker.com/r/zedeus/nitter/tags).
To run Nitter with Docker, you'll need to install and run Redis separately To run Nitter with Docker, you'll need to install and run Redis separately
before you can run the container. See below for how to also run Redis using before you can run the container. See below for how to also run Redis using
Docker. Docker.
To build and run Nitter in Docker: To build and run Nitter in Docker:
```bash ```bash
docker build -t nitter:latest . docker build -t nitter:latest .
docker run -v $(pwd)/nitter.conf:/src/nitter.conf -d --network host nitter:latest docker run -v $(pwd)/nitter.conf:/src/nitter.conf -d --network host nitter:latest
``` ```
Note: For ARM64, use this Dockerfile: [`Dockerfile.arm64`](https://github.com/zedeus/nitter/blob/master/Dockerfile.arm64).
A prebuilt Docker image is provided as well: A prebuilt Docker image is provided as well:
```bash ```bash
docker run -v $(pwd)/nitter.conf:/src/nitter.conf -d --network host zedeus/nitter:latest docker run -v $(pwd)/nitter.conf:/src/nitter.conf -d --network host zedeus/nitter:latest
``` ```
Using docker-compose to run both Nitter and Redis as different containers: Using docker-compose to run both Nitter and Redis as different containers:
Change `redisHost` from `localhost` to `nitter-redis` in `nitter.conf`, then run: Change `redisHost` from `localhost` to `nitter-redis` in `nitter.conf`, then run:
```bash ```bash
docker-compose up -d docker-compose up -d
``` ```

View File

@@ -1,13 +1,17 @@
--define:ssl --define:ssl
--define:useStdLib --define:useStdLib
--threads:off
# workaround httpbeast file upload bug # workaround httpbeast file upload bug
--assertions:off --assertions:off
# disable annoying warnings # disable annoying warnings
warning("GcUnsafe2", off) warning("GcUnsafe2", off)
warning("HoleEnumConv", off)
hint("XDeclaredButNotUsed", off) hint("XDeclaredButNotUsed", off)
hint("XCannotRaiseY", off) hint("XCannotRaiseY", off)
hint("User", off) hint("User", off)
const
nimVersion = (major: NimMajor, minor: NimMinor, patch: NimPatch)
when nimVersion >= (1, 6, 0):
warning("HoleEnumConv", off)

View File

@@ -8,22 +8,10 @@ services:
ports: ports:
- "127.0.0.1:8080:8080" # Replace with "8080:8080" if you don't use a reverse proxy - "127.0.0.1:8080:8080" # Replace with "8080:8080" if you don't use a reverse proxy
volumes: volumes:
- ./nitter.conf:/src/nitter.conf:Z,ro - ./nitter.conf:/src/nitter.conf:ro
- ./sessions.jsonl:/src/sessions.jsonl:Z,ro # Run get_sessions.py to get the credentials
depends_on: depends_on:
- nitter-redis - nitter-redis
restart: unless-stopped restart: unless-stopped
healthcheck:
test: wget -nv --tries=1 --spider http://127.0.0.1:8080/Jack/status/20 || exit 1
interval: 30s
timeout: 5s
retries: 2
user: "998:998"
read_only: true
security_opt:
- no-new-privileges:true
cap_drop:
- ALL
nitter-redis: nitter-redis:
image: redis:6-alpine image: redis:6-alpine
@@ -32,17 +20,6 @@ services:
volumes: volumes:
- nitter-redis:/data - nitter-redis:/data
restart: unless-stopped restart: unless-stopped
healthcheck:
test: redis-cli ping
interval: 30s
timeout: 5s
retries: 2
user: "999:1000"
read_only: true
security_opt:
- no-new-privileges:true
cap_drop:
- ALL
volumes: volumes:
nitter-redis: nitter-redis:

View File

@@ -1,48 +1,45 @@
[Server] [Server]
hostname = "nitter.net" # for generating links, change this to your own domain/ip
title = "nitter"
address = "0.0.0.0" address = "0.0.0.0"
port = 8080 port = 8080
https = false # disable to enable cookies when not using https https = false # disable to enable cookies when not using https
httpMaxConnections = 100 httpMaxConnections = 100
staticDir = "./public" staticDir = "./public"
title = "nitter"
hostname = "nitter.net"
[Cache] [Cache]
listMinutes = 240 # how long to cache list info (not the tweets, so keep it high) listMinutes = 240 # how long to cache list info (not the tweets, so keep it high)
rssMinutes = 10 # how long to cache rss queries rssMinutes = 10 # how long to cache rss queries
redisHost = "localhost" # Change to "nitter-redis" if using docker-compose redisHost = "localhost" # Change to "nitter-redis" if using docker-compose
redisPort = 6379 redisPort = 6379
redisPassword = "" redisPassword = ""
redisConnections = 20 # minimum open connections in pool redisConnections = 20 # connection pool size
redisMaxConnections = 30 redisMaxConnections = 30
# new connections are opened when none are available, but if the pool size # max, new connections are opened when none are available, but if the pool size
# goes above this, they're closed when released. don't worry about this unless # goes above this, they're closed when released. don't worry about this unless
# you receive tons of requests per second # you receive tons of requests per second
[Config] [Config]
hmacKey = "secretkey" # random key for cryptographic signing of video urls hmacKey = "secretkey" # random key for cryptographic signing of video urls
base64Media = false # use base64 encoding for proxied media urls base64Media = false # use base64 encoding for proxied media urls
enableRSS = true # master switch, set to false to disable all RSS feeds enableRSS = true # set this to false to disable RSS feeds
enableRSSUserTweets = true # /@user/rss enableDebug = false # enable request logs and debug endpoints
enableRSSUserReplies = true # /@user/with_replies/rss proxy = "" # http/https url, SOCKS proxies are not supported
enableRSSUserMedia = true # /@user/media/rss
enableRSSSearch = true # /search/rss and /@user/search/rss
enableRSSList = true # list RSS feeds
enableDebug = false # enable request logs and debug endpoints (/.sessions)
proxy = "" # http/https url, SOCKS proxies are not supported
proxyAuth = "" proxyAuth = ""
apiProxy = "" # nitter-proxy host, e.g. localhost:7000 tokenCount = 10
disableTid = false # enable this if cookie-based auth is failing # minimum amount of usable tokens. tokens are used to authorize API requests,
maxConcurrentReqs = 2 # max requests at a time per session to avoid race conditions # but they expire after ~1 hour, and have a limit of 187 requests.
maxRetries = 1 # max number of retries on rate limit errors # the limit gets reset every 15 minutes, and the pool is filled up so there's
retryDelayMs = 150 # delay in ms between retries # always at least $tokenCount usable tokens. again, only increase this if
# you receive major bursts all the time
# Change default preferences here, see src/prefs_impl.nim for a complete list # Change default preferences here, see src/prefs_impl.nim for a complete list
[Preferences] [Preferences]
theme = "Nitter" theme = "Nitter"
replaceTwitter = "nitter.net" replaceTwitter = "nitter.net"
replaceYouTube = "piped.video" replaceYouTube = "piped.kavin.rocks"
replaceReddit = "teddit.net" replaceReddit = "teddit.net"
replaceInstagram = ""
proxyVideos = true proxyVideos = true
hlsPlayback = false hlsPlayback = false
infiniteScroll = false infiniteScroll = false

View File

@@ -10,25 +10,25 @@ bin = @["nitter"]
# Dependencies # Dependencies
requires "nim >= 2.0.0" requires "nim >= 1.4.8"
requires "jester#baca3f" requires "jester >= 0.5.0"
requires "karax#5cf360c" requires "karax#c71bc92"
requires "sass#7dfdd03" requires "sass#e683aa1"
requires "nimcrypto#a079df9" requires "nimcrypto#a5742a9"
requires "markdown#158efe3" requires "markdown#abdbe5e"
requires "packedjson#9e6fbb6" requires "packedjson#d11d167"
requires "supersnappy#6c94198" requires "supersnappy#2.1.1"
requires "redpool#8b7c1db" requires "redpool#8b7c1db"
requires "https://github.com/zedeus/redis#d0a0e6f" requires "https://github.com/zedeus/redis#d0a0e6f"
requires "zippy#ca5989a" requires "zippy#0.7.3"
requires "flatty#e668085" requires "flatty#0.2.3"
requires "jsony#1de1f08" requires "jsony#d0e69bd"
requires "oauth#b8c163b"
# Tasks # Tasks
task scss, "Generate css": task scss, "Generate css":
exec "nim r --hint[Processing]:off tools/gencss" exec "nimble c --hint[Processing]:off -d:danger -r tools/gencss"
task md, "Render md": task md, "Render md":
exec "nim r --hint[Processing]:off tools/rendermd" exec "nimble c --hint[Processing]:off -d:danger -r tools/rendermd"

View File

@@ -1,148 +1,53 @@
@font-face { @font-face {
font-family: "fontello"; font-family: 'fontello';
src: url("/fonts/fontello.eot?49059696"); src: url('/fonts/fontello.eot?21002321');
src: src: url('/fonts/fontello.eot?21002321#iefix') format('embedded-opentype'),
url("/fonts/fontello.eot?49059696#iefix") format("embedded-opentype"), url('/fonts/fontello.woff2?21002321') format('woff2'),
url("/fonts/fontello.woff2?49059696") format("woff2"), url('/fonts/fontello.woff?21002321') format('woff'),
url("/fonts/fontello.woff?49059696") format("woff"), url('/fonts/fontello.ttf?21002321') format('truetype'),
url("/fonts/fontello.ttf?49059696") format("truetype"), url('/fonts/fontello.svg?21002321#fontello') format('svg');
url("/fonts/fontello.svg?49059696#fontello") format("svg");
font-weight: normal; font-weight: normal;
font-style: normal; font-style: normal;
} }
[class^="icon-"]:before, [class^="icon-"]:before, [class*=" icon-"]:before {
[class*=" icon-"]:before {
font-family: "fontello"; font-family: "fontello";
font-style: normal; font-style: normal;
font-weight: normal; font-weight: normal;
speak: never; speak: never;
display: inline-block; display: inline-block;
text-decoration: inherit; text-decoration: inherit;
width: 1em; width: 1em;
margin-right: 0.2em;
text-align: center; text-align: center;
/* For safety - reset parent styles, that can break glyph codes*/ /* For safety - reset parent styles, that can break glyph codes*/
font-variant: normal; font-variant: normal;
text-transform: none; text-transform: none;
/* fix buttons height, for twitter bootstrap */ /* fix buttons height, for twitter bootstrap */
line-height: 1em; line-height: 1em;
/* Font smoothing. That was taken from TWBS */ /* Font smoothing. That was taken from TWBS */
-webkit-font-smoothing: antialiased; -webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale; -moz-osx-font-smoothing: grayscale;
} }
.icon-views:before { .icon-heart:before { content: '\2665'; } /* '♥' */
content: "\e800"; .icon-quote:before { content: '\275e'; } /* '❞' */
} .icon-comment:before { content: '\e802'; } /* '' */
.icon-ok:before { content: '\e803'; } /* '' */
/* '' */ .icon-play:before { content: '\e804'; } /* '' */
.icon-heart:before { .icon-link:before { content: '\e805'; } /* '' */
content: "\e801"; .icon-calendar:before { content: '\e806'; } /* '' */
} .icon-location:before { content: '\e807'; } /* '' */
.icon-picture:before { content: '\e809'; } /* '' */
/* '' */ .icon-lock:before { content: '\e80a'; } /* '' */
.icon-quote:before { .icon-down:before { content: '\e80b'; } /* '' */
content: "\e802"; .icon-retweet:before { content: '\e80d'; } /* '' */
} .icon-search:before { content: '\e80e'; } /* '' */
.icon-pin:before { content: '\e80f'; } /* '' */
/* '' */ .icon-cog:before { content: '\e812'; } /* '' */
.icon-comment:before { .icon-rss-feed:before { content: '\e813'; } /* '' */
content: "\e803"; .icon-info:before { content: '\f128'; } /* '' */
} .icon-bird:before { content: '\f309'; } /* '' */
/* '' */
.icon-group:before {
content: "\e804";
}
/* '' */
.icon-play:before {
content: "\e805";
}
/* '' */
.icon-link:before {
content: "\e806";
}
/* '' */
.icon-calendar:before {
content: "\e807";
}
/* '' */
.icon-location:before {
content: "\e808";
}
/* '' */
.icon-picture:before {
content: "\e809";
}
/* '' */
.icon-lock:before {
content: "\e80a";
}
/* '' */
.icon-down:before {
content: "\e80b";
}
/* '' */
.icon-retweet:before {
content: "\e80c";
}
/* '' */
.icon-search:before {
content: "\e80d";
}
/* '' */
.icon-pin:before {
content: "\e80e";
}
/* '' */
.icon-cog:before {
content: "\e80f";
}
/* '' */
.icon-rss:before {
content: "\e810";
}
/* '' */
.icon-ok:before {
content: "\e811";
}
/* '' */
.icon-attention:before {
content: "\e812";
}
/* '' */
.icon-circle:before {
content: "\f111";
}
/* '' */
.icon-info:before {
content: "\f128";
}
/* '' */
.icon-bird:before {
content: "\f309";
}
/* '' */

View File

@@ -1,41 +0,0 @@
body {
--bg_color: #282a36;
--fg_color: #f8f8f2;
--fg_faded: #818eb6;
--fg_dark: var(--fg_faded);
--fg_nav: var(--accent);
--bg_panel: #343746;
--bg_elements: #292b36;
--bg_overlays: #44475a;
--bg_hover: #2f323f;
--grey: var(--fg_faded);
--dark_grey: #44475a;
--darker_grey: #3d4051;
--darkest_grey: #363948;
--border_grey: #44475a;
--accent: #bd93f9;
--accent_light: #caa9fa;
--accent_dark: var(--accent);
--accent_border: #ff79c696;
--play_button: #ffb86c;
--play_button_hover: #ffc689;
--more_replies_dots: #bd93f9;
--error_red: #ff5555;
--verified_blue: var(--accent);
--icon_text: ##F8F8F2;
--tab: #6272a4;
--tab_selected: var(--accent);
--profile_stat: #919cbf;
}
.search-bar > form input::placeholder{
color: var(--fg_faded);
}

View File

@@ -1,15 +1,6 @@
Font license info Font license info
## Modern Pictograms
Copyright (c) 2012 by John Caserta. All rights reserved.
Author: John Caserta
License: SIL (http://scripts.sil.org/OFL)
Homepage: http://thedesignoffice.org/project/modern-pictograms/
## Entypo ## Entypo
Copyright (C) 2012 by Daniel Bruce Copyright (C) 2012 by Daniel Bruce
@@ -46,3 +37,12 @@ Font license info
Homepage: http://aristeides.com/ Homepage: http://aristeides.com/
## Modern Pictograms
Copyright (c) 2012 by John Caserta. All rights reserved.
Author: John Caserta
License: SIL (http://scripts.sil.org/OFL)
Homepage: http://thedesignoffice.org/project/modern-pictograms/

Binary file not shown.

View File

@@ -1,28 +1,26 @@
<?xml version="1.0" standalone="no"?> <?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"> <!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg"> <svg xmlns="http://www.w3.org/2000/svg">
<metadata>Copyright (C) 2026 by original authors @ fontello.com</metadata> <metadata>Copyright (C) 2020 by original authors @ fontello.com</metadata>
<defs> <defs>
<font id="fontello" horiz-adv-x="1000" > <font id="fontello" horiz-adv-x="1000" >
<font-face font-family="fontello" font-weight="400" font-stretch="normal" units-per-em="1000" ascent="850" descent="-150" /> <font-face font-family="fontello" font-weight="400" font-stretch="normal" units-per-em="1000" ascent="850" descent="-150" />
<missing-glyph horiz-adv-x="1000" /> <missing-glyph horiz-adv-x="1000" />
<glyph glyph-name="views" unicode="&#xe800;" d="M180 516l0-538-180 0 0 538 180 0z m250-138l0-400-180 0 0 400 180 0z m250 344l0-744-180 0 0 744 180 0z" horiz-adv-x="680" /> <glyph glyph-name="heart" unicode="&#x2665;" d="M790 644q70-64 70-156t-70-158l-360-330-360 330q-70 66-70 158t70 156q62 58 151 58t153-58l56-52 58 52q62 58 150 58t152-58z" horiz-adv-x="860" />
<glyph glyph-name="heart" unicode="&#xe801;" d="M790 644q70-64 70-156t-70-158l-360-330-360 330q-70 66-70 158t70 156q62 58 151 58t153-58l56-52 58 52q62 58 150 58t152-58z" horiz-adv-x="860" /> <glyph glyph-name="quote" unicode="&#x275e;" d="M18 685l335 0 0-334q0-140-98-238t-237-97l0 111q92 0 158 65t65 159l-223 0 0 334z m558 0l335 0 0-334q0-140-98-238t-237-97l0 111q92 0 158 65t65 159l-223 0 0 334z" horiz-adv-x="928" />
<glyph glyph-name="quote" unicode="&#xe802;" d="M18 685l335 0 0-334q0-140-98-238t-237-97l0 111q92 0 158 65t65 159l-223 0 0 334z m558 0l335 0 0-334q0-140-98-238t-237-97l0 111q92 0 158 65t65 159l-223 0 0 334z" horiz-adv-x="928" /> <glyph glyph-name="comment" unicode="&#xe802;" d="M1000 350q0-97-67-179t-182-130-251-48q-39 0-81 4-110-97-257-135-27-8-63-12-10-1-17 5t-10 16v1q-2 2 0 6t1 6 2 5l4 5t4 5 4 5q4 5 17 19t20 22 17 22 18 28 15 33 15 42q-88 50-138 123t-51 157q0 73 40 139t106 114 160 76 194 28q136 0 251-48t182-130 67-179z" horiz-adv-x="1000" />
<glyph glyph-name="comment" unicode="&#xe803;" d="M1000 350q0-97-67-179t-182-130-251-48q-39 0-81 4-110-97-257-135-27-8-63-12-10-1-17 5t-10 16v1q-2 2 0 6t1 6 2 5l4 5t4 5 4 5q4 5 17 19t20 22 17 22 18 28 15 33 15 42q-88 50-138 123t-51 157q0 73 40 139t106 114 160 76 194 28q136 0 251-48t182-130 67-179z" horiz-adv-x="1000" /> <glyph glyph-name="ok" unicode="&#xe803;" d="M0 260l162 162 166-164 508 510 164-164-510-510-162-162-162 164z" horiz-adv-x="1000" />
<glyph glyph-name="group" unicode="&#xe804;" d="M0 106l0 134q0 26 18 32l171 80q-66 39-68 131 0 56 35 103 37 41 90 43 31 0 63-19-49-125 23-237-12-11-25-19l-114-55q-48-23-52-84l0-143-114 0q-25 0-27 34z m193-59l0 168q0 27 22 37l152 70 57 28q-37 23-60 66t-22 94q0 76 46 130t110 54 109-54 45-130q0-105-78-158l61-30 146-70q24-10 24-37l0-168q-2-37-37-41l-541 0q-14 2-24 14t-10 27z m473 330q68 106 22 231 31 19 66 21 49 0 90-43 35-41 35-103 0-82-65-131l168-80q18-10 18-32l0-134q0-32-27-34l-118 0 0 143q0 57-50 84l-110 53q-15 8-29 25z" horiz-adv-x="1000" /> <glyph glyph-name="play" unicode="&#xe804;" d="M772 333l-741-412q-13-7-22-2t-9 20v822q0 14 9 20t22-2l741-412q13-7 13-17t-13-17z" horiz-adv-x="785.7" />
<glyph glyph-name="play" unicode="&#xe805;" d="M772 333l-741-412q-13-7-22-2t-9 20v822q0 14 9 20t22-2l741-412q13-7 13-17t-13-17z" horiz-adv-x="785.7" /> <glyph glyph-name="link" unicode="&#xe805;" d="M294 116q14 14 34 14t36-14q32-34 0-70l-42-40q-56-56-132-56-78 0-134 56t-56 132q0 78 56 134l148 148q70 68 144 77t128-43q16-16 16-36t-16-36q-36-32-70 0-50 48-132-34l-148-146q-26-26-26-64t26-62q26-26 63-26t63 26z m450 574q56-56 56-132 0-78-56-134l-158-158q-74-72-150-72-62 0-112 50-14 14-14 34t14 36q14 14 35 14t35-14q50-48 122 24l158 156q28 28 28 64 0 38-28 62-24 26-56 31t-60-21l-50-50q-16-14-36-14t-34 14q-34 34 0 70l50 50q54 54 127 51t129-61z" horiz-adv-x="800" />
<glyph glyph-name="link" unicode="&#xe806;" d="M294 116q14 14 34 14t36-14q32-34 0-70l-42-40q-56-56-132-56-78 0-134 56t-56 132q0 78 56 134l148 148q70 68 144 77t128-43q16-16 16-36t-16-36q-36-32-70 0-50 48-132-34l-148-146q-26-26-26-64t26-62q26-26 63-26t63 26z m450 574q56-56 56-132 0-78-56-134l-158-158q-74-72-150-72-62 0-112 50-14 14-14 34t14 36q14 14 35 14t35-14q50-48 122 24l158 156q28 28 28 64 0 38-28 62-24 26-56 31t-60-21l-50-50q-16-14-36-14t-34 14q-34 34 0 70l50 50q54 54 127 51t129-61z" horiz-adv-x="800" /> <glyph glyph-name="calendar" unicode="&#xe806;" d="M800 700q42 0 71-29t29-71l0-600q0-40-29-70t-71-30l-700 0q-40 0-70 30t-30 70l0 600q0 42 30 71t70 29l46 0 0-100 160 0 0 100 290 0 0-100 160 0 0 100 44 0z m0-700l0 400-700 0 0-400 700 0z m-540 800l0-170-70 0 0 170 70 0z m450 0l0-170-70 0 0 170 70 0z" horiz-adv-x="900" />
<glyph glyph-name="calendar" unicode="&#xe807;" d="M800 700q42 0 71-29t29-71l0-600q0-40-29-70t-71-30l-700 0q-40 0-70 30t-30 70l0 600q0 42 30 71t70 29l46 0 0-100 160 0 0 100 290 0 0-100 160 0 0 100 44 0z m0-700l0 400-700 0 0-400 700 0z m-540 800l0-170-70 0 0 170 70 0z m450 0l0-170-70 0 0 170 70 0z" horiz-adv-x="900" /> <glyph glyph-name="location" unicode="&#xe807;" d="M250 750q104 0 177-73t73-177q0-106-62-243t-126-223l-62-84q-10 12-27 35t-60 89-76 130-60 147-27 149q0 104 73 177t177 73z m0-388q56 0 96 40t40 96-40 95-96 39-95-39-39-95 39-96 95-40z" horiz-adv-x="500" />
<glyph glyph-name="location" unicode="&#xe808;" d="M250 750q104 0 177-73t73-177q0-106-62-243t-126-223l-62-84q-10 12-27 35t-60 89-76 130-60 147-27 149q0 104 73 177t177 73z m0-388q56 0 96 40t40 96-40 95-96 39-95-39-39-95 39-96 95-40z" horiz-adv-x="500" />
<glyph glyph-name="picture" unicode="&#xe809;" d="M357 529q0-45-31-76t-76-32-76 32-31 76 31 76 76 31 76-31 31-76z m572-215v-250h-786v107l178 179 90-89 285 285z m53 393h-893q-7 0-12-5t-6-13v-678q0-7 6-13t12-5h893q7 0 13 5t5 13v678q0 8-5 13t-13 5z m89-18v-678q0-37-26-63t-63-27h-893q-36 0-63 27t-26 63v678q0 37 26 63t63 27h893q37 0 63-27t26-63z" horiz-adv-x="1071.4" /> <glyph glyph-name="picture" unicode="&#xe809;" d="M357 529q0-45-31-76t-76-32-76 32-31 76 31 76 76 31 76-31 31-76z m572-215v-250h-786v107l178 179 90-89 285 285z m53 393h-893q-7 0-12-5t-6-13v-678q0-7 6-13t12-5h893q7 0 13 5t5 13v678q0 8-5 13t-13 5z m89-18v-678q0-37-26-63t-63-27h-893q-36 0-63 27t-26 63v678q0 37 26 63t63 27h893q37 0 63-27t26-63z" horiz-adv-x="1071.4" />
@@ -30,25 +28,19 @@
<glyph glyph-name="down" unicode="&#xe80b;" d="M939 399l-414-413q-10-11-25-11t-25 11l-414 413q-11 11-11 26t11 25l93 92q10 11 25 11t25-11l296-296 296 296q11 11 25 11t26-11l92-92q11-11 11-25t-11-26z" horiz-adv-x="1000" /> <glyph glyph-name="down" unicode="&#xe80b;" d="M939 399l-414-413q-10-11-25-11t-25 11l-414 413q-11 11-11 26t11 25l93 92q10 11 25 11t25-11l296-296 296 296q11 11 25 11t26-11l92-92q11-11 11-25t-11-26z" horiz-adv-x="1000" />
<glyph glyph-name="retweet" unicode="&#xe80c;" d="M714 11q0-7-5-13t-13-5h-535q-5 0-8 1t-5 4-3 4-2 7 0 6v335h-107q-15 0-25 11t-11 25q0 13 8 23l179 214q11 12 27 12t28-12l178-214q9-10 9-23 0-15-11-25t-25-11h-107v-214h321q9 0 14-6l89-108q4-5 4-11z m357 232q0-13-8-23l-178-214q-12-13-28-13t-27 13l-179 214q-8 10-8 23 0 14 11 25t25 11h107v214h-322q-9 0-14 7l-89 107q-4 5-4 11 0 7 5 12t13 6h536q4 0 7-1t5-4 3-5 2-6 1-7v-334h107q14 0 25-11t10-25z" horiz-adv-x="1071.4" /> <glyph glyph-name="retweet" unicode="&#xe80d;" d="M714 11q0-7-5-13t-13-5h-535q-5 0-8 1t-5 4-3 4-2 7 0 6v335h-107q-15 0-25 11t-11 25q0 13 8 23l179 214q11 12 27 12t28-12l178-214q9-10 9-23 0-15-11-25t-25-11h-107v-214h321q9 0 14-6l89-108q4-5 4-11z m357 232q0-13-8-23l-178-214q-12-13-28-13t-27 13l-179 214q-8 10-8 23 0 14 11 25t25 11h107v214h-322q-9 0-14 7l-89 107q-4 5-4 11 0 7 5 12t13 6h536q4 0 7-1t5-4 3-5 2-6 1-7v-334h107q14 0 25-11t10-25z" horiz-adv-x="1071.4" />
<glyph glyph-name="search" unicode="&#xe80d;" d="M772 78q30-34 6-62l-46-46q-36-32-68 0l-190 190q-74-42-156-42-128 0-223 95t-95 223 90 219 218 91 224-95 96-223q0-88-46-162z m-678 358q0-88 68-156t156-68 151 63 63 153q0 88-68 155t-156 67-151-63-63-151z" horiz-adv-x="789" /> <glyph glyph-name="search" unicode="&#xe80e;" d="M772 78q30-34 6-62l-46-46q-36-32-68 0l-190 190q-74-42-156-42-128 0-223 95t-95 223 90 219 218 91 224-95 96-223q0-88-46-162z m-678 358q0-88 68-156t156-68 151 63 63 153q0 88-68 155t-156 67-151-63-63-151z" horiz-adv-x="789" />
<glyph glyph-name="pin" unicode="&#xe80e;" d="M268 368v250q0 8-5 13t-13 5-13-5-5-13v-250q0-8 5-13t13-5 13 5 5 13z m375-197q0-14-11-25t-25-10h-239l-29-270q-1-7-6-11t-11-5h-1q-15 0-17 15l-43 271h-225q-15 0-25 10t-11 25q0 69 44 124t99 55v286q-29 0-50 21t-22 50 22 50 50 22h357q29 0 50-22t21-50-21-50-50-21v-286q55 0 99-55t44-124z" horiz-adv-x="642.9" /> <glyph glyph-name="pin" unicode="&#xe80f;" d="M268 368v250q0 8-5 13t-13 5-13-5-5-13v-250q0-8 5-13t13-5 13 5 5 13z m375-197q0-14-11-25t-25-10h-239l-29-270q-1-7-6-11t-11-5h-1q-15 0-17 15l-43 271h-225q-15 0-25 10t-11 25q0 69 44 124t99 55v286q-29 0-50 21t-22 50 22 50 50 22h357q29 0 50-22t21-50-21-50-50-21v-286q55 0 99-55t44-124z" horiz-adv-x="642.9" />
<glyph glyph-name="cog" unicode="&#xe80f;" d="M911 295l-133-56q-8-22-12-31l55-133-79-79-135 53q-9-4-31-12l-55-134-112 0-56 133q-11 4-33 13l-132-55-78 79 53 134q-1 3-4 9t-6 12-4 11l-131 55 0 112 131 56 14 33-54 132 78 79 133-54q22 9 33 13l55 132 112 0 56-132q14-5 31-13l133 55 80-79-54-135q6-12 12-30l133-56 0-112z m-447-111q69 0 118 48t49 118-49 119-118 50-119-50-49-119 49-118 119-48z" horiz-adv-x="928" /> <glyph glyph-name="cog" unicode="&#xe812;" d="M911 295l-133-56q-8-22-12-31l55-133-79-79-135 53q-9-4-31-12l-55-134-112 0-56 133q-11 4-33 13l-132-55-78 79 53 134q-1 3-4 9t-6 12-4 11l-131 55 0 112 131 56 14 33-54 132 78 79 133-54q22 9 33 13l55 132 112 0 56-132q14-5 31-13l133 55 80-79-54-135q6-12 12-30l133-56 0-112z m-447-111q69 0 118 48t49 118-49 119-118 50-119-50-49-119 49-118 119-48z" horiz-adv-x="928" />
<glyph glyph-name="rss" unicode="&#xe810;" d="M184 93c0-51-43-91-93-91s-91 40-91 91c0 50 41 91 91 91s93-41 93-91z m261-85l-125 0c0 174-140 323-315 323l0 118c231 0 440-163 440-441z m259 0l-136 0c0 300-262 561-563 561l0 129c370 0 699-281 699-690z" horiz-adv-x="704" /> <glyph glyph-name="rss-feed" unicode="&#xe813;" d="M184 93c0-51-43-91-93-91s-91 40-91 91c0 50 41 91 91 91s93-41 93-91z m261-85l-125 0c0 174-140 323-315 323l0 118c231 0 440-163 440-441z m259 0l-136 0c0 300-262 561-563 561l0 129c370 0 699-281 699-690z" horiz-adv-x="704" />
<glyph glyph-name="ok" unicode="&#xe811;" d="M933 534q0-22-16-38l-404-404-76-76q-16-15-38-15t-38 15l-76 76-202 202q-15 16-15 38t15 38l76 76q16 16 38 16t38-16l164-165 366 367q16 16 38 16t38-16l76-76q16-15 16-38z" horiz-adv-x="1000" />
<glyph glyph-name="attention-circled" unicode="&#xe812;" d="M429 779q116 0 215-58t156-156 57-215-57-215-156-156-215-58-216 58-155 156-58 215 58 215 155 156 216 58z m71-696v106q0 8-5 13t-12 5h-107q-8 0-13-5t-6-13v-106q0-8 6-13t13-6h107q7 0 12 6t5 13z m-1 192l10 346q0 7-6 10-5 5-13 5h-123q-8 0-13-5-6-3-6-10l10-346q0-6 5-10t14-4h103q8 0 13 4t6 10z" horiz-adv-x="857.1" />
<glyph glyph-name="circle" unicode="&#xf111;" d="M857 350q0-117-57-215t-156-156-215-58-216 58-155 156-58 215 58 215 155 156 216 58 215-58 156-156 57-215z" horiz-adv-x="857.1" />
<glyph glyph-name="info" unicode="&#xf128;" d="M393 149v-134q0-9-7-15t-15-7h-134q-9 0-16 7t-7 15v134q0 9 7 16t16 6h134q9 0 15-6t7-16z m176 335q0-30-8-56t-20-43-31-33-32-25-34-19q-23-13-38-37t-15-37q0-10-7-18t-16-9h-134q-8 0-14 11t-6 20v26q0 46 37 87t79 60q33 16 47 32t14 42q0 24-26 41t-60 18q-36 0-60-16-20-14-60-64-7-9-17-9-7 0-14 4l-91 70q-8 6-9 14t3 16q89 148 259 148 45 0 90-17t81-46 59-72 23-88z" horiz-adv-x="571.4" /> <glyph glyph-name="info" unicode="&#xf128;" d="M393 149v-134q0-9-7-15t-15-7h-134q-9 0-16 7t-7 15v134q0 9 7 16t16 6h134q9 0 15-6t7-16z m176 335q0-30-8-56t-20-43-31-33-32-25-34-19q-23-13-38-37t-15-37q0-10-7-18t-16-9h-134q-8 0-14 11t-6 20v26q0 46 37 87t79 60q33 16 47 32t14 42q0 24-26 41t-60 18q-36 0-60-16-20-14-60-64-7-9-17-9-7 0-14 4l-91 70q-8 6-9 14t3 16q89 148 259 148 45 0 90-17t81-46 59-72 23-88z" horiz-adv-x="571.4" />
<glyph glyph-name="bird" unicode="&#xf309;" d="M920 636q-36-54-94-98l0-24q0-130-60-250t-186-203-290-83q-160 0-290 84 14-2 46-2 132 0 234 80-62 2-110 38t-66 94q10-4 34-4 26 0 50 6-66 14-108 66t-42 120l0 2q36-20 84-24-84 58-84 158 0 48 26 94 154-188 390-196-6 18-6 42 0 78 55 133t135 55q82 0 136-58 60 12 120 44-20-66-82-104 56 8 108 30z" horiz-adv-x="920" /> <glyph glyph-name="bird" unicode="&#xf309;" d="M920 636q-36-54-94-98l0-24q0-130-60-250t-186-203-290-83q-160 0-290 84 14-2 46-2 132 0 234 80-62 2-110 38t-66 94q10-4 34-4 26 0 50 6-66 14-108 66t-42 120l0 2q36-20 84-24-84 58-84 158 0 48 26 94 154-188 390-196-6 18-6 42 0 78 55 133t135 55q82 0 136-58 60 12 120 44-20-66-82-104 56 8 108 30z" horiz-adv-x="920" />
</font> </font>
</defs> </defs>
</svg> </svg>

Before

Width:  |  Height:  |  Size: 7.3 KiB

After

Width:  |  Height:  |  Size: 5.9 KiB

Binary file not shown.

Binary file not shown.

Binary file not shown.

5
public/js/hls.light.min.js vendored Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -3,7 +3,6 @@
function playVideo(overlay) { function playVideo(overlay) {
const video = overlay.parentElement.querySelector('video'); const video = overlay.parentElement.querySelector('video');
const url = video.getAttribute("data-url"); const url = video.getAttribute("data-url");
const startTime = parseFloat(video.getAttribute("data-start") || "0");
video.setAttribute("controls", ""); video.setAttribute("controls", "");
overlay.style.display = "none"; overlay.style.display = "none";
@@ -13,13 +12,12 @@ function playVideo(overlay) {
hls.attachMedia(video); hls.attachMedia(video);
hls.on(Hls.Events.MANIFEST_PARSED, function () { hls.on(Hls.Events.MANIFEST_PARSED, function () {
hls.loadLevel = hls.levels.length - 1; hls.loadLevel = hls.levels.length - 1;
hls.startLoad(startTime); hls.startLoad();
video.play(); video.play();
}); });
} else if (video.canPlayType('application/vnd.apple.mpegurl')) { } else if (video.canPlayType('application/vnd.apple.mpegurl')) {
video.src = url; video.src = url;
video.addEventListener('canplay', function() { video.addEventListener('canplay', function() {
if (startTime > 0) video.currentTime = startTime;
video.play(); video.play();
}); });
} }

View File

@@ -1,225 +1,66 @@
// @license http://www.gnu.org/licenses/agpl-3.0.html AGPL-3.0 // @license http://www.gnu.org/licenses/agpl-3.0.html AGPL-3.0
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-License-Identifier: AGPL-3.0-only
function insertBeforeLast(node, elem) { function insertBeforeLast(node, elem) {
node.insertBefore(elem, node.childNodes[node.childNodes.length - 2]); node.insertBefore(elem, node.childNodes[node.childNodes.length - 2]);
} }
function getLoadMore(doc) { function getLoadMore(doc) {
return doc.querySelector(".show-more:not(.timeline-item)"); return doc.querySelector('.show-more:not(.timeline-item)');
} }
function getHrefs(selector) { function isDuplicate(item, itemClass) {
return new Set([...document.querySelectorAll(selector)].map(el => el.getAttribute("href"))); const tweet = item.querySelector(".tweet-link");
if (tweet == null) return false;
const href = tweet.getAttribute("href");
return document.querySelector(itemClass + " .tweet-link[href='" + href + "']") != null;
} }
function getTweetId(item) { window.onload = function() {
const m = item.querySelector(".tweet-link")?.getAttribute("href")?.match(/\/status\/(\d+)/); const url = window.location.pathname;
return m ? m[1] : ""; const isTweet = url.indexOf("/status/") !== -1;
} const containerClass = isTweet ? ".replies" : ".timeline";
const itemClass = containerClass + ' > div:not(.top-ref)';
function isDuplicate(item, hrefs) { var html = document.querySelector("html");
return hrefs.has(item.querySelector(".tweet-link")?.getAttribute("href")); var container = document.querySelector(containerClass);
} var loading = false;
const GAP = 10; window.addEventListener('scroll', function() {
if (loading) return;
if (html.scrollTop + html.clientHeight >= html.scrollHeight - 3000) {
loading = true;
var loadMore = getLoadMore(document);
if (loadMore == null) return;
class Masonry { loadMore.children[0].text = "Loading...";
constructor(container) {
this.container = container;
const colSizes = {
small: w => Math.max(130, w * 0.11),
medium: w => Math.max(190, Math.min(350, w * 0.22)),
large: w => Math.max(350, Math.min(480, w * 0.22)),
};
const size = container.dataset.colSize || "medium";
this._targetWidth = colSizes[size] || colSizes.medium;
this.colHeights = [];
this.colCounts = [];
this.colCount = 0;
this._lastWidth = 0;
this._colWidthCache = 0;
this._items = [];
this._revealTimer = null;
this.container.classList.add("masonry-active");
let resizeTimer; var url = new URL(loadMore.children[0].href);
window.addEventListener("resize", () => { url.searchParams.append('scroll', 'true');
clearTimeout(resizeTimer);
resizeTimer = setTimeout(() => this._rebuild(), 50);
});
// Re-sync positions whenever images finish loading and items grow taller. fetch(url.toString()).then(function (response) {
// Must be set up before _rebuild() so initial items get observed on first pass. return response.text();
let syncTimer; }).then(function (html) {
this._observer = window.ResizeObserver ? new ResizeObserver(() => { var parser = new DOMParser();
clearTimeout(syncTimer); var doc = parser.parseFromString(html, 'text/html');
syncTimer = setTimeout(() => this.syncHeights(), 100); loadMore.remove();
}) : null;
this._rebuild(); for (var item of doc.querySelectorAll(itemClass)) {
} if (item.className == "timeline-item show-more") continue;
if (isDuplicate(item, itemClass)) continue;
if (isTweet) container.appendChild(item);
else insertBeforeLast(container, item);
}
// Reveal all items and gallery siblings (show-more, top-ref). Idempotent. loading = false;
_revealAll() { const newLoadMore = getLoadMore(doc);
clearTimeout(this._revealTimer); if (newLoadMore == null) return;
for (const item of this._items) item.classList.add("masonry-visible"); if (isTweet) container.appendChild(newLoadMore);
for (const el of this.container.parentElement.querySelectorAll(":scope > .show-more, :scope > .top-ref, :scope > .timeline-footer")) else insertBeforeLast(container, newLoadMore);
el.classList.add("masonry-visible"); }).catch(function (err) {
} console.warn('Something went wrong.', err);
loading = true;
// Height-primary, count-as-tiebreaker: handles both tall tweets and unloaded images. });
_pickCol() {
return this.colHeights.reduce((min, h, i) => {
const m = this.colHeights[min];
return (h < m || (h === m && this.colCounts[i] < this.colCounts[min])) ? i : min;
}, 0);
}
// Position items using current column state. Updates colHeights, colCounts, container height.
_position(items, heights, colWidth) {
for (let i = 0; i < items.length; i++) {
const col = this._pickCol();
items[i].style.left = `${col * (colWidth + GAP)}px`;
items[i].style.top = `${this.colHeights[col]}px`;
this.colHeights[col] += heights[i] + GAP;
this.colCounts[col]++;
}
this.container.style.height = `${Math.max(0, ...this.colHeights)}px`;
}
// Full reset and re-place all items.
_place(items, heights, n, colWidth) {
this.colHeights = new Array(n).fill(0);
this.colCounts = new Array(n).fill(0);
this.colCount = n;
this._position(items, heights, colWidth);
}
_rebuild() {
const w = this.container.clientWidth;
const n = Math.max(1, Math.floor(w / this._targetWidth(w)));
if (n === this.colCount && w === this._lastWidth) return;
const isFirst = this.colCount === 0;
if (isFirst) {
this._items = [...this.container.querySelectorAll(".timeline-item")];
}
// Sort newest-first by tweet ID (snowflake IDs exceed Number precision, compare as strings).
this._items.sort((a, b) => {
const idA = getTweetId(a), idB = getTweetId(b);
if (idA.length !== idB.length) return idB.length - idA.length;
return idB < idA ? -1 : idB > idA ? 1 : 0;
});
// Pre-set widths BEFORE reading heights so measurements reflect the new column width.
const colWidth = this._colWidthCache = Math.floor((w - GAP * (n - 1)) / n);
for (const item of this._items) item.style.width = `${colWidth}px`;
this._place(this._items, this._items.map(item => item.offsetHeight), n, colWidth);
this._lastWidth = w;
if (isFirst) {
if (this._observer) this._items.forEach(item => this._observer.observe(item));
// Reveal immediately if all images are cached, else wait for syncHeights.
const hasUnloaded = this._items.some(item =>
[...item.querySelectorAll("img")].some(img => !img.complete));
if (hasUnloaded) {
this._revealTimer = setTimeout(() => this._revealAll(), 1000);
} else {
this._revealAll();
}
}
}
// Re-read actual heights and re-place all items. Fixes drift after images load.
syncHeights() {
this._place(this._items, this._items.map(item => item.offsetHeight), this.colCount, this._colWidthCache);
this._revealAll();
}
// Batch-add items in three phases to avoid O(N) reflows:
// 1. writes: set widths, append all — no reads, no reflows
// 2. one read: batch offsetHeight
// 3. writes: assign columns, set left/top
addAll(newItems) {
if (!newItems.length) return;
const colWidth = this._colWidthCache;
for (const item of newItems) {
item.style.width = `${colWidth}px`;
this.container.appendChild(item);
}
this._position(newItems, newItems.map(item => item.offsetHeight), colWidth);
this._items.push(...newItems);
if (this._observer) newItems.forEach(item => this._observer.observe(item));
}
}
document.addEventListener("DOMContentLoaded", function () {
const isTweet = location.pathname.includes("/status/");
const containerClass = isTweet ? ".replies" : ".timeline";
const itemClass = containerClass + " > div:not(.top-ref)";
const html = document.documentElement;
const container = document.querySelector(containerClass);
const masonryEl = container?.querySelector(".gallery-masonry");
const masonry = masonryEl ? new Masonry(masonryEl) : null;
let loading = false;
function handleScroll(failed) {
if (loading || html.scrollTop + html.clientHeight < html.scrollHeight - 3000) return;
const loadMore = getLoadMore(document);
if (!loadMore) return;
loading = true;
loadMore.children[0].text = "Loading...";
const url = new URL(loadMore.children[0].href);
url.searchParams.append("scroll", "true");
fetch(url)
.then(r => {
if (r.status > 299) throw new Error("error");
return r.text();
})
.then(responseText => {
const doc = new DOMParser().parseFromString(responseText, "text/html");
loadMore.remove();
if (masonry) {
masonry.syncHeights();
const newMasonry = doc.querySelector(".gallery-masonry");
if (newMasonry) {
const knownHrefs = getHrefs(".gallery-masonry .tweet-link");
masonry.addAll([...newMasonry.querySelectorAll(".timeline-item")].filter(item => !isDuplicate(item, knownHrefs)));
}
} else {
const knownHrefs = getHrefs(`${itemClass} .tweet-link`);
for (const item of doc.querySelectorAll(itemClass)) {
if (item.className === "timeline-item show-more" || isDuplicate(item, knownHrefs)) continue;
isTweet ? container.appendChild(item) : insertBeforeLast(container, item);
}
} }
});
loading = false; };
const newLoadMore = getLoadMore(doc);
if (newLoadMore) {
isTweet ? container.appendChild(newLoadMore) : insertBeforeLast(container, newLoadMore);
if (masonry) newLoadMore.classList.add("masonry-visible");
}
})
.catch(err => {
console.warn("Something went wrong.", err);
if (failed > 3) { loadMore.children[0].text = "Error"; return; }
loading = false;
handleScroll((failed || 0) + 1);
});
}
window.addEventListener("scroll", () => handleScroll());
});
// @license-end // @license-end

View File

@@ -4,15 +4,15 @@ Nitter is a free and open source alternative Twitter front-end focused on
privacy and performance. The source is available on GitHub at privacy and performance. The source is available on GitHub at
<https://github.com/zedeus/nitter> <https://github.com/zedeus/nitter>
- No JavaScript or ads * No JavaScript or ads
- All requests go through the backend, client never talks to Twitter * All requests go through the backend, client never talks to Twitter
- Prevents Twitter from tracking your IP or JavaScript fingerprint * Prevents Twitter from tracking your IP or JavaScript fingerprint
- Uses Twitter's unofficial API (no developer account required) * Uses Twitter's unofficial API (no rate limits or developer account required)
- Lightweight (for [@nim_lang](/nim_lang), 60KB vs 784KB from twitter.com) * Lightweight (for [@nim_lang](/nim_lang), 60KB vs 784KB from twitter.com)
- RSS feeds * RSS feeds
- Themes * Themes
- Mobile support (responsive design) * Mobile support (responsive design)
- AGPLv3 licensed, no proprietary instances permitted * AGPLv3 licensed, no proprietary instances permitted
Nitter's GitHub wiki contains Nitter's GitHub wiki contains
[instances](https://github.com/zedeus/nitter/wiki/Instances) and [instances](https://github.com/zedeus/nitter/wiki/Instances) and
@@ -21,13 +21,12 @@ maintained by the community.
## Why use Nitter? ## Why use Nitter?
It's impossible to use Twitter without JavaScript enabled, and as of 2024 you It's impossible to use Twitter without JavaScript enabled. For privacy-minded
need to sign up. For privacy-minded folks, preventing JavaScript analytics and folks, preventing JavaScript analytics and IP-based tracking is important, but
IP-based tracking is important, but apart from using a VPN and uBlock/uMatrix, apart from using a VPN and uBlock/uMatrix, it's impossible. Despite being behind
it's impossible. Despite being behind a VPN and using heavy-duty adblockers, a VPN and using heavy-duty adblockers, you can get accurately tracked with your
you can get accurately tracked with your [browser's [browser's fingerprint](https://restoreprivacy.com/browser-fingerprinting/),
fingerprint](https://restoreprivacy.com/browser-fingerprinting/), [no [no JavaScript required](https://noscriptfingerprint.com/). This all became
JavaScript required](https://noscriptfingerprint.com/). This all became
particularly important after Twitter [removed the particularly important after Twitter [removed the
ability](https://www.eff.org/deeplinks/2020/04/twitter-removes-privacy-option-and-shows-why-we-need-strong-privacy-laws) ability](https://www.eff.org/deeplinks/2020/04/twitter-removes-privacy-option-and-shows-why-we-need-strong-privacy-laws)
for users to control whether their data gets sent to advertisers. for users to control whether their data gets sent to advertisers.
@@ -43,13 +42,12 @@ Twitter account.
## Donating ## Donating
Liberapay: https://liberapay.com/zedeus \ Liberapay: <https://liberapay.com/zedeus> \
Patreon: https://patreon.com/nitter \ Patreon: <https://patreon.com/nitter> \
BTC: bc1qpqpzjkcpgluhzf7x9yqe7jfe8gpfm5v08mdr55 \ BTC: bc1qp7q4qz0fgfvftm5hwz3vy284nue6jedt44kxya \
ETH: 0x24a0DB59A923B588c7A5EBd0dBDFDD1bCe9c4460 \ ETH: 0x66d84bc3fd031b62857ad18c62f1ba072b011925 \
XMR: 42hKayRoEAw4D6G6t8mQHPJHQcXqofjFuVfavqKeNMNUZfeJLJAcNU19i1bGdDvcdN6romiSscWGWJCczFLe9RFhM3d1zpL \ LTC: ltc1qhsz5nxw6jw9rdtw9qssjeq2h8hqk2f85rdgpkr \
SOL: ANsyGNXFo6osuFwr1YnUqif2RdoYRhc27WdyQNmmETSW \ XMR: 42hKayRoEAw4D6G6t8mQHPJHQcXqofjFuVfavqKeNMNUZfeJLJAcNU19i1bGdDvcdN6romiSscWGWJCczFLe9RFhM3d1zpL
ZEC: u1vndfqtzyy6qkzhkapxelel7ams38wmfeccu3fdpy2wkuc4erxyjm8ncjhnyg747x6t0kf0faqhh2hxyplgaum08d2wnj4n7cyu9s6zhxkqw2aef4hgd4s6vh5hpqvfken98rg80kgtgn64ff70djy7s8f839z00hwhuzlcggvefhdlyszkvwy3c7yw623vw3rvar6q6evd3xcvveypt
## Contact ## Contact

View File

@@ -1,5 +0,0 @@
User-agent: *
Disallow: /
Crawl-delay: 1
User-agent: Twitterbot
Disallow:

View File

@@ -1,238 +1,124 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, httpclient, strutils, sequtils, sugar import asyncdispatch, httpclient, uri, strutils, sequtils, sugar
import packedjson import packedjson
import types, query, formatters, consts, apiutils, parser, utils import types, query, formatters, consts, apiutils, parser
import experimental/parser as newParser import experimental/parser as newParser
# Helper to generate params object for GraphQL requests proc getGraphUser*(id: string): Future[User] {.async.} =
proc genParams(variables: string; fieldToggles = ""): seq[(string, string)] =
result.add ("variables", variables)
result.add ("features", gqlFeatures)
if fieldToggles.len > 0:
result.add ("fieldToggles", fieldToggles)
proc apiUrl(endpoint, variables: string; fieldToggles = ""): ApiUrl =
return ApiUrl(endpoint: endpoint, params: genParams(variables, fieldToggles))
proc apiReq(endpoint, variables: string; fieldToggles = ""): ApiReq =
let url = apiUrl(endpoint, variables, fieldToggles)
return ApiReq(cookie: url, oauth: url)
proc mediaUrl(id, cursor: string; count=20): ApiReq =
result = ApiReq(
cookie: apiUrl(graphUserMedia, userMediaVars % [id, cursor, $count]),
oauth: apiUrl(graphUserMediaV2, restIdVars % [id, cursor, $count])
)
proc userTweetsUrl(id: string; cursor: string): ApiReq =
result = ApiReq(
# cookie: apiUrl(graphUserTweets, userTweetsVars % [id, cursor], userTweetsFieldToggles),
oauth: apiUrl(graphUserTweetsV2, restIdVars % [id, cursor, "20"])
)
# might change this in the future pending testing
result.cookie = result.oauth
proc userTweetsAndRepliesUrl(id: string; cursor: string): ApiReq =
let cookieVars = userTweetsAndRepliesVars % [id, cursor]
result = ApiReq(
cookie: apiUrl(graphUserTweetsAndReplies, cookieVars, userTweetsFieldToggles),
oauth: apiUrl(graphUserTweetsAndRepliesV2, restIdVars % [id, cursor, "20"])
)
proc tweetDetailUrl(id: string; cursor: string): ApiReq =
let cookieVars = tweetDetailVars % [id, cursor]
result = ApiReq(
# cookie: apiUrl(graphTweetDetail, cookieVars, tweetDetailFieldToggles),
cookie: apiUrl(graphTweet, tweetVars % [id, cursor]),
oauth: apiUrl(graphTweet, tweetVars % [id, cursor])
)
proc userUrl(username: string): ApiReq =
let cookieVars = """{"screen_name":"$1","withGrokTranslatedBio":false}""" % username
result = ApiReq(
cookie: apiUrl(graphUser, cookieVars, tweetDetailFieldToggles),
oauth: apiUrl(graphUserV2, """{"screen_name": "$1"}""" % username)
)
proc getGraphUser*(username: string): Future[User] {.async.} =
if username.len == 0: return
let js = await fetchRaw(userUrl(username))
result = parseGraphUser(js)
proc getGraphUserById*(id: string): Future[User] {.async.} =
if id.len == 0 or id.any(c => not c.isDigit): return if id.len == 0 or id.any(c => not c.isDigit): return
let let
url = apiReq(graphUserById, """{"rest_id": "$1"}""" % id) variables = %*{"userId": id, "withSuperFollowsUserFields": true}
js = await fetchRaw(url) js = await fetchRaw(graphUser ? {"variables": $variables}, Api.userRestId)
result = parseGraphUser(js) result = parseGraphUser(js)
proc getAboutAccount*(username: string): Future[AccountInfo] {.async.} =
if username.len == 0: return
let
url = apiReq(graphAboutAccount, """{"screenName":"$1"}""" % username)
js = await fetch(url)
result = parseAboutAccount(js)
proc restReq(endpoint: string; params: seq[(string, string)] = @[]): ApiReq =
let url = ApiUrl(endpoint: endpoint, params: params)
ApiReq(cookie: url, oauth: url)
proc getBroadcastInfo*(id: string): Future[Broadcast] {.async.} =
if id.len == 0: return
let
req = apiReq(graphBroadcast, """{"id":"$1"}""" % id)
js = await fetch(req)
result = parseBroadcastInfo(js)
proc fetchBroadcastStream*(mediaKey: string): Future[string] {.async.} =
if mediaKey.len == 0: return
let
streamReq = restReq(restLiveStream & mediaKey)
streamJs = await fetch(streamReq)
result = streamJs{"source", "noRedirectPlaybackUrl"}.getStr(
streamJs{"source", "location"}.getStr)
proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Profile] {.async.} =
if id.len == 0: return
let
cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
url = case kind
of TimelineKind.tweets: userTweetsUrl(id, cursor)
of TimelineKind.replies: userTweetsAndRepliesUrl(id, cursor)
of TimelineKind.media: mediaUrl(id, cursor, 100)
js = await fetch(url)
result = parseGraphTimeline(js, after)
proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} =
if id.len == 0: return
let
cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
url = apiReq(graphListTweets, restIdVars % [id, cursor, "20"])
js = await fetch(url)
result = parseGraphTimeline(js, after).tweets
proc getGraphListBySlug*(name, list: string): Future[List] {.async.} = proc getGraphListBySlug*(name, list: string): Future[List] {.async.} =
let let
variables = %*{"screenName": name, "listSlug": list} variables = %*{"screenName": name, "listSlug": list, "withHighlightedLabel": false}
url = apiReq(graphListBySlug, $variables) url = graphListBySlug ? {"variables": $variables}
js = await fetch(url) result = parseGraphList(await fetch(url, Api.listBySlug))
result = parseGraphList(js)
proc getGraphList*(id: string): Future[List] {.async.} = proc getGraphList*(id: string): Future[List] {.async.} =
let let
url = apiReq(graphListById, """{"listId": "$1"}""" % id) variables = %*{"listId": id, "withHighlightedLabel": false}
js = await fetch(url) url = graphList ? {"variables": $variables}
result = parseGraphList(js) result = parseGraphList(await fetch(url, Api.list))
proc getGraphListMembers*(list: List; after=""): Future[Result[User]] {.async.} = proc getGraphListMembers*(list: List; after=""): Future[Result[User]] {.async.} =
if list.id.len == 0: return if list.id.len == 0: return
var let
variables = %*{ variables = %*{
"listId": list.id, "listId": list.id,
"cursor": after,
"withSuperFollowsUserFields": false,
"withBirdwatchPivots": false, "withBirdwatchPivots": false,
"withDownvotePerspective": false, "withDownvotePerspective": false,
"withReactionsMetadata": false, "withReactionsMetadata": false,
"withReactionsPerspective": false "withReactionsPerspective": false,
"withSuperFollowsTweetFields": false
} }
if after.len > 0: url = graphListMembers ? {"variables": $variables}
variables["cursor"] = % after result = parseGraphListMembers(await fetchRaw(url, Api.listMembers), after)
let
url = apiReq(graphListMembers, $variables)
js = await fetchRaw(url)
result = parseGraphListMembers(js, after)
proc getGraphTweetResult*(id: string): Future[Tweet] {.async.} = proc getListTimeline*(id: string; after=""): Future[Timeline] {.async.} =
if id.len == 0: return if id.len == 0: return
let let
url = apiReq(graphTweetResult, """{"rest_id": "$1"}""" % id) ps = genParams({"list_id": id, "ranking_mode": "reverse_chronological"}, after)
js = await fetch(url) url = listTimeline ? ps
result = parseGraphTweetResult(js) result = parseTimeline(await fetch(url, Api.timeline), after)
proc getGraphTweet(id: string; after=""): Future[Conversation] {.async.} = proc getUser*(username: string): Future[User] {.async.} =
if username.len == 0: return
let
ps = genParams({"screen_name": username})
json = await fetchRaw(userShow ? ps, Api.userShow)
result = parseUser(json, username)
proc getUserById*(userId: string): Future[User] {.async.} =
if userId.len == 0: return
let
ps = genParams({"user_id": userId})
json = await fetchRaw(userShow ? ps, Api.userShow)
result = parseUser(json)
proc getTimeline*(id: string; after=""; replies=false): Future[Timeline] {.async.} =
if id.len == 0: return if id.len == 0: return
let let
cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: "" ps = genParams({"userId": id, "include_tweet_replies": $replies}, after)
js = await fetch(tweetDetailUrl(id, cursor)) url = timeline / (id & ".json") ? ps
result = parseGraphConversation(js, id) result = parseTimeline(await fetch(url, Api.timeline), after)
proc getMediaTimeline*(id: string; after=""): Future[Timeline] {.async.} =
if id.len == 0: return
let url = mediaTimeline / (id & ".json") ? genParams(cursor=after)
result = parseTimeline(await fetch(url, Api.timeline), after)
proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
if name.len == 0: return
let
ps = genParams({"screen_name": name, "trim_user": "true"},
count="18", ext=false)
url = photoRail ? ps
result = parsePhotoRail(await fetch(url, Api.timeline))
proc getSearch*[T](query: Query; after=""): Future[Result[T]] {.async.} =
when T is User:
const
searchMode = ("result_filter", "user")
parse = parseUsers
fetchFunc = fetchRaw
else:
const
searchMode = ("tweet_search_mode", "live")
parse = parseTweets
fetchFunc = fetchRaw
let q = genQueryParam(query)
if q.len == 0 or q == emptyQuery:
return Result[T](beginning: true, query: query)
let url = search ? genParams(searchParams & @[("q", q), searchMode], after)
try:
result = parse(await fetchFunc(url, Api.search), after)
result.query = query
except InternalError:
return Result[T](beginning: true, query: query)
proc getTweetImpl(id: string; after=""): Future[Conversation] {.async.} =
let url = tweet / (id & ".json") ? genParams(cursor=after)
result = parseConversation(await fetch(url, Api.tweet), id)
proc getReplies*(id, after: string): Future[Result[Chain]] {.async.} = proc getReplies*(id, after: string): Future[Result[Chain]] {.async.} =
result = (await getGraphTweet(id, after)).replies result = (await getTweetImpl(id, after)).replies
result.beginning = after.len == 0 result.beginning = after.len == 0
proc getTweet*(id: string; after=""): Future[Conversation] {.async.} = proc getTweet*(id: string; after=""): Future[Conversation] {.async.} =
result = await getGraphTweet(id) result = await getTweetImpl(id)
if after.len > 0: if after.len > 0:
result.replies = await getReplies(id, after) result.replies = await getReplies(id, after)
proc getGraphEditHistory*(id: string): Future[EditHistory] {.async.} = proc getStatus*(id: string): Future[Tweet] {.async.} =
if id.len == 0: return let url = status / (id & ".json") ? genParams()
let result = parseStatus(await fetch(url, Api.status))
url = apiReq(graphTweetEditHistory, tweetEditHistoryVars % id)
js = await fetch(url)
result = parseGraphEditHistory(js, id)
proc getGraphTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
  ## Run a tweet search via the GraphQL SearchTimeline endpoint.
  ## `after` is either a pagination cursor, or "maxid:<id>" which is
  ## folded into the query itself (workaround for #1372).
  let maxId =
    if not after.startsWith("maxid:"): ""
    else: validateNumber(after[6..^1])

  let q = genQueryParam(query, maxId)
  if q.len == 0 or q == emptyQuery:
    return Timeline(query: query, beginning: true)

  var
    variables = %*{
      "rawQuery": q,
      "query_source": "typedQuery",
      "count": 20,
      "product": "Latest",
      "withDownvotePerspective": false,
      "withReactionsMetadata": false,
      "withReactionsPerspective": false
    }
  # a cursor is only sent when not paginating via maxid
  if after.len > 0 and maxId.len == 0:
    variables["cursor"] = % after
  let
    url = apiReq(graphSearchTimeline, $variables)
    js = await fetch(url)
  result = parseGraphSearch[Tweets](js, after)
  result.query = query

  # when no more items are available the API just returns the last page in
  # full. this detects that and clears the page instead. both strings must
  # be at least 64 chars before slicing: the previous `len > 0` guard let
  # shorter cursors raise an IndexDefect on `[0..<64]`.
  if maxId.len == 0 and after.len >= 64 and result.bottom.len >= 64 and
      after[0..<64] == result.bottom[0..<64]:
    result.content.setLen(0)
proc getGraphUserSearch*(query: Query; after=""): Future[Result[User]] {.async.} =
  ## Run a user ("People") search via the GraphQL SearchTimeline endpoint.
  ## Returns an empty first page when the query text is blank.
  if query.text.len == 0:
    return Result[User](query: query, beginning: true)

  var
    variables = %*{
      "rawQuery": query.text,
      "query_source": "typedQuery",
      "count": 20,
      "product": "People",
      "withDownvotePerspective": false,
      "withReactionsMetadata": false,
      "withReactionsPerspective": false
    }
  if after.len > 0:
    variables["cursor"] = % after
    # the old `result.beginning = false` here was a dead store: `result`
    # is fully overwritten by the parseGraphSearch assignment below,
    # which derives `beginning` from the cursor itself
  let
    url = apiReq(graphSearchTimeline, $variables)
    js = await fetch(url)
  result = parseGraphSearch[User](js, after)
  result.query = query
proc getPhotoRail*(id: string): Future[PhotoRail] {.async.} =
  ## Fetch a user's media grid ("photo rail") by user id; an empty id
  ## yields an empty rail.
  if id.len == 0:
    return
  let response = await fetch(mediaUrl(id, "", 30))
  result = parseGraphPhotoRail(response)
proc resolve*(url: string; prefs: Prefs): Future[string] {.async.} = proc resolve*(url: string; prefs: Prefs): Future[string] {.async.} =
let client = newAsyncHttpClient(maxRedirects=0) let client = newAsyncHttpClient(maxRedirects=0)

View File

@@ -1,221 +1,121 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import httpclient, asyncdispatch, options, strutils, uri, times, math, tables import httpclient, asyncdispatch, options, strutils, uri
import jsony, packedjson, zippy, oauth1 import jsony, packedjson, zippy
import types, auth, consts, parserutils, http_pool, tid import types, tokens, consts, parserutils, http_pool
import experimental/types/common import experimental/types/common
const const
rlRemaining = "x-rate-limit-remaining" rlRemaining = "x-rate-limit-remaining"
rlReset = "x-rate-limit-reset" rlReset = "x-rate-limit-reset"
rlLimit = "x-rate-limit-limit"
errorsToSkip = {null, doesntExist, tweetNotFound, timeout, unauthorized, badRequest}
var var pool: HttpPool
pool: HttpPool
disableTid: bool
apiProxy: string
maxRetries: int
retryDelayMs: int
proc setDisableTid*(disable: bool) = proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
disableTid = disable count="20"; ext=true): seq[(string, string)] =
result = timelineParams
for p in pars:
result &= p
if ext:
result &= ("ext", "mediaStats")
result &= ("include_ext_alt_text", "true")
result &= ("include_ext_media_availability", "true")
if count.len > 0:
result &= ("count", count)
if cursor.len > 0:
# The raw cursor often has plus signs, which sometimes get turned into spaces,
# so we need to them back into a plus
if " " in cursor:
result &= ("cursor", cursor.replace(" ", "+"))
else:
result &= ("cursor", cursor)
proc setMaxRetries*(n: int) = proc genHeaders*(token: Token = nil): HttpHeaders =
maxRetries = n
proc setRetryDelayMs*(ms: int) =
retryDelayMs = ms
proc setApiProxy*(url: string) =
apiProxy = ""
if url.len > 0:
apiProxy = url.strip(chars={'/'}) & "/"
if "http" notin apiProxy:
apiProxy = "http://" & apiProxy
proc toUrl(req: ApiReq; sessionKind: SessionKind): Uri =
let url = case sessionKind
of oauth: req.oauth
of cookie: req.cookie
let base = case sessionKind
of oauth: "https://api.x.com"
of cookie: "https://x.com/i/api"
let prefix = if url.endpoint.startsWith("1.1/"): "" else: "graphql/"
parseUri(base) / (prefix & url.endpoint) ? url.params
proc getOauthHeader(url, oauthToken, oauthTokenSecret: string): string =
let
encodedUrl = url.replace(",", "%2C").replace("+", "%20")
params = OAuth1Parameters(
consumerKey: consumerKey,
signatureMethod: "HMAC-SHA1",
timestamp: $int(round(epochTime())),
nonce: "0",
isIncludeVersionToHeader: true,
token: oauthToken
)
signature = getSignature(HttpGet, encodedUrl, "", params, consumerSecret, oauthTokenSecret)
params.signature = percentEncode(signature)
return getOauth1RequestHeader(params)["authorization"]
proc getCookieHeader(authToken, ct0: string): string =
"auth_token=" & authToken & "; ct0=" & ct0
proc genHeaders*(session: Session, url: Uri): Future[HttpHeaders] {.async.} =
result = newHttpHeaders({ result = newHttpHeaders({
"accept": "*/*", "connection": "keep-alive",
"authorization": auth,
"content-type": "application/json",
"x-guest-token": if token == nil: "" else: token.tok,
"x-twitter-active-user": "yes",
"authority": "api.twitter.com",
"accept-encoding": "gzip", "accept-encoding": "gzip",
"accept-language": "en-US,en;q=0.9", "accept-language": "en-US,en;q=0.9",
"connection": "keep-alive", "accept": "*/*",
"content-type": "application/json", "DNT": "1"
"origin": "https://x.com",
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/142.0.0.0 Safari/537.36",
"x-twitter-active-user": "yes",
"x-twitter-client-language": "en",
"priority": "u=1, i"
}) })
case session.kind template updateToken() =
of SessionKind.oauth: if api != Api.search and resp.headers.hasKey(rlRemaining):
result["authorization"] = getOauthHeader($url, session.oauthToken, session.oauthSecret) let
of SessionKind.cookie: remaining = parseInt(resp.headers[rlRemaining])
result["x-twitter-auth-type"] = "OAuth2Session" reset = parseInt(resp.headers[rlReset])
result["x-csrf-token"] = session.ct0 token.setRateLimit(api, remaining, reset)
result["cookie"] = getCookieHeader(session.authToken, session.ct0)
result["sec-ch-ua"] = """"Google Chrome";v="142", "Chromium";v="142", "Not A(Brand";v="24""""
result["sec-ch-ua-mobile"] = "?0"
result["sec-ch-ua-platform"] = "Windows"
result["sec-fetch-dest"] = "empty"
result["sec-fetch-mode"] = "cors"
result["sec-fetch-site"] = "same-site"
if disableTid or "/1.1/" in url.path:
result["authorization"] = bearerToken2
else:
result["authorization"] = bearerToken
result["x-client-transaction-id"] = await genTid(url.path)
proc getAndValidateSession*(req: ApiReq): Future[Session] {.async.} =
result = await getSession(req)
case result.kind
of SessionKind.oauth:
if result.oauthToken.len == 0:
echo "[sessions] Empty oauth token, session: ", result.pretty
raise rateLimitError()
of SessionKind.cookie:
if result.authToken.len == 0 or result.ct0.len == 0:
echo "[sessions] Empty cookie credentials, session: ", result.pretty
raise rateLimitError()
template fetchImpl(result, fetchBody) {.dirty.} = template fetchImpl(result, fetchBody) {.dirty.} =
once: once:
pool = HttpPool() pool = HttpPool()
var token = await getToken(api)
if token.tok.len == 0:
raise rateLimitError()
try: try:
var resp: AsyncResponse var resp: AsyncResponse
pool.use(await genHeaders(session, url)): pool.use(genHeaders(token)):
template getContent = resp = await c.get($url)
# TODO: this is a temporary simple implementation result = await resp.body
if apiProxy.len > 0 and "/1.1/" notin url.path:
resp = await c.get(($url).replace("https://", apiProxy))
else:
resp = await c.get($url)
result = await resp.body
getContent()
if resp.status == $Http503: if resp.status == $Http503:
badClient = true badClient = true
raise newException(BadClientError, "Bad client") raise newException(InternalError, result)
if resp.status == $Http404 and result.len == 0:
echo "[sessions] transient 404 (empty body), retrying: ", url.path
raise rateLimitError()
if resp.headers.hasKey(rlRemaining):
let
remaining = parseInt(resp.headers[rlRemaining])
reset = parseInt(resp.headers[rlReset])
limit = parseInt(resp.headers[rlLimit])
session.setRateLimit(req, remaining, reset, limit)
if result.len > 0: if result.len > 0:
if resp.headers.getOrDefault("content-encoding") == "gzip": if resp.headers.getOrDefault("content-encoding") == "gzip":
result = uncompress(result, dfGzip) result = uncompress(result, dfGzip)
else:
if result.startsWith("{\"errors"): echo "non-gzip body, url: ", url, ", body: ", result
let errors = result.fromJson(Errors)
if errors notin errorsToSkip:
echo "Fetch error, API: ", url.path, ", errors: ", errors
if errors in {expiredToken, badToken, locked}:
invalidate(session)
raise rateLimitError()
elif errors in {rateLimited}:
# rate limit hit, resets after 24 hours
setLimited(session, req)
raise rateLimitError()
elif result.startsWith("429 Too Many Requests"):
echo "[sessions] 429 error, API: ", url.path, ", session: ", session.pretty
raise rateLimitError()
fetchBody fetchBody
release(token, used=true)
if resp.status == $Http400: if resp.status == $Http400:
echo "ERROR 400, ", url.path, ": ", result
raise newException(InternalError, $url) raise newException(InternalError, $url)
except InternalError as e: except InternalError as e:
raise e raise e
except BadClientError as e:
raise e
except OSError as e:
raise e
except Exception as e: except Exception as e:
let s = session.pretty echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
echo "error: ", e.name, ", msg: ", e.msg, ", session: ", s, ", url: ", url if "length" notin e.msg and "descriptor" notin e.msg:
release(token, invalid=true)
raise rateLimitError() raise rateLimitError()
finally:
release(session)
template retry(bod) = proc fetch*(url: Uri; api: Api): Future[JsonNode] {.async.} =
for i in 0 ..< maxRetries: var body: string
try: fetchImpl body:
bod if body.startsWith('{') or body.startsWith('['):
break result = parseJson(body)
except RateLimitError: else:
echo "[sessions] Rate limited, retrying ", req.cookie.endpoint, echo resp.status, ": ", body, " --- url: ", url
" request (", i, "/", maxRetries, ")..." result = newJNull()
if retryDelayMs > 0:
await sleepAsync(retryDelayMs)
proc fetch*(req: ApiReq): Future[JsonNode] {.async.} = updateToken()
retry:
var
body: string
session = await getAndValidateSession(req)
let url = req.toUrl(session.kind) let error = result.getError
if error in {invalidToken, forbidden, badToken}:
echo "fetch error: ", result.getError
release(token, invalid=true)
raise rateLimitError()
fetchImpl body: proc fetchRaw*(url: Uri; api: Api): Future[string] {.async.} =
if body.startsWith('{') or body.startsWith('['): fetchImpl result:
result = parseJson(body) if not (result.startsWith('{') or result.startsWith('[')):
else: echo resp.status, ": ", result, " --- url: ", url
echo resp.status, ": ", body, " --- url: ", url result.setLen(0)
result = newJNull()
let error = result.getError updateToken()
if error != null and error notin errorsToSkip:
echo "Fetch error, API: ", url.path, ", error: ", error
if error in {expiredToken, badToken, locked}:
invalidate(session)
raise rateLimitError()
proc fetchRaw*(req: ApiReq): Future[string] {.async.} = if result.startsWith("{\"errors"):
retry: let errors = result.fromJson(Errors)
var session = await getAndValidateSession(req) if errors in {invalidToken, forbidden, badToken}:
let url = req.toUrl(session.kind) echo "fetch error: ", errors
release(token, invalid=true)
fetchImpl result: raise rateLimitError()
if not (result.startsWith('{') or result.startsWith('[')):
echo resp.status, ": ", result, " --- url: ", url
result.setLen(0)

View File

@@ -1,213 +0,0 @@
#SPDX-License-Identifier: AGPL-3.0-only
import std/[asyncdispatch, times, json, random, strutils, tables, packedsets, os]
import types, consts
import experimental/parser/session
const hourInSeconds = 60 * 60
var
sessionPool: seq[Session]
enableLogging = false
# max requests at a time per session to avoid race conditions
maxConcurrentReqs = 2
proc setMaxConcurrentReqs*(reqs: int) =
  ## Override the per-session concurrent request cap; non-positive
  ## values are ignored.
  if reqs <= 0:
    return
  maxConcurrentReqs = reqs
template log(str: varargs[string, `$`]) =
  ## Emit a "[sessions]"-prefixed log line. Gated on enableLogging,
  ## which is set from cfg.enableDebug in initSessionPool — previously
  ## the flag was declared and assigned but never consulted, so session
  ## logging could not be switched off.
  if enableLogging:
    echo "[sessions] ", str.join("")
proc endpoint(req: ApiReq; session: Session): string =
  ## Select the endpoint name matching the session's auth kind.
  case session.kind
  of oauth:
    result = req.oauth.endpoint
  of cookie:
    result = req.cookie.endpoint
proc pretty*(session: Session): string =
  ## Human-readable session label for log lines: "<kind> <id> (<user>)",
  ## degrading gracefully when the id or username is missing.
  if session.isNil:
    return "<null>"
  let
    hasId = session.id > 0
    hasName = session.username.len > 0
  var ident: string
  if hasId and hasName:
    ident = $session.id & " (" & session.username & ")"
  elif hasName:
    ident = session.username
  elif hasId:
    ident = $session.id
  else:
    ident = "<unknown>"
  result = $session.kind & " " & ident
proc snowflakeToEpoch(flake: int64): int64 =
  ## Convert a Twitter snowflake id to its creation time as a unix
  ## timestamp in seconds. The top 42 bits hold milliseconds since the
  ## Twitter epoch (2010-11-04, unix ms 1288834974657).
  let twitterMs = flake shr 22
  result = (twitterMs + 1288834974657) div 1000
proc getSessionPoolHealth*(): JsonNode =
  ## Summarize pool state for a health endpoint: session count, how many
  ## are limited, creation-time extremes/average (derived from the
  ## snowflake ids), and request totals per API for still-active
  ## rate-limit windows.
  let now = epochTime().int

  var
    totalReqs = 0
    limited: PackedSet[int64]
    reqsPerApi: Table[string, int]
    oldest = now.int64  # seeded high so the min-scan below works
    newest = 0'i64
    average = 0'i64

  for session in sessionPool:
    # session ids are snowflakes, so they encode the account creation time
    let created = snowflakeToEpoch(session.id)
    if created > newest:
      newest = created
    if created < oldest:
      oldest = created
    average += created

    if session.limited:
      limited.incl session.id

    for api in session.apis.keys:
      let
        apiStatus = session.apis[api]
        reqs = apiStatus.limit - apiStatus.remaining

      # no requests made with this session and endpoint since the limit reset
      if apiStatus.reset < now:
        continue

      reqsPerApi.mgetOrPut($api, 0).inc reqs
      totalReqs.inc reqs

  if sessionPool.len > 0:
    average = average div sessionPool.len
  else:
    # avoid reporting misleading timestamps for an empty pool
    oldest = 0
    average = 0

  return %*{
    "sessions": %*{
      "total": sessionPool.len,
      "limited": limited.card,
      "oldest": $fromUnix(oldest),
      "newest": $fromUnix(newest),
      "average": $fromUnix(average)
    },
    "requests": %*{
      "total": totalReqs,
      "apis": reqsPerApi
    }
  }
proc getSessionPoolDebug*(): JsonNode =
  ## Per-session debug dump keyed by session id: pending request count,
  ## optional "limited" flag, and per-API remaining quota / reset time
  ## for rate-limit windows that have not expired yet.
  let now = epochTime().int
  var list = newJObject()

  for session in sessionPool:
    let sessionJson = %*{
      "apis": newJObject(),
      "pending": session.pending,
    }

    if session.limited:
      sessionJson["limited"] = %true

    for api in session.apis.keys:
      let
        apiStatus = session.apis[api]
        obj = %*{}

      # only include windows that are still active
      if apiStatus.reset > now.int:
        obj["remaining"] = %apiStatus.remaining
        obj["reset"] = %apiStatus.reset

      # skip endpoints whose window already expired (obj stayed empty)
      if "remaining" notin obj:
        continue

      sessionJson{"apis", $api} = obj

    list[$session.id] = sessionJson

  return %list
proc rateLimitError*(): ref RateLimitError =
  ## Construct the exception raised when a request is rate limited.
  result = newException(RateLimitError, "rate limited")
proc noSessionsError*(): ref NoSessionsError =
  ## Construct the exception raised when no session can serve a request.
  result = newException(NoSessionsError, "no sessions available")
proc isLimited(session: Session; req: ApiReq): bool =
  ## Whether this session should be skipped for the request's endpoint.
  ## A session-wide "limited" flag expires after one hour; otherwise the
  ## per-API rate-limit window is consulted.
  if session.isNil:
    return true

  let api = req.endpoint(session)

  # graphUserTweetsV2 is exempt from the session-wide limited flag
  if session.limited and api != graphUserTweetsV2:
    if (epochTime().int - session.limitedAt) > hourInSeconds:
      # the hour has passed; clear the flag and treat as usable again
      session.limited = false
      log "resetting limit: ", session.pretty
      return false
    else:
      return true

  if api in session.apis:
    let limit = session.apis[api]
    # keep a safety margin of 10 requests until the window resets
    return limit.remaining <= 10 and limit.reset > epochTime().int
  else:
    return false
proc isReady(session: Session; req: ApiReq): bool =
  ## A session is ready when it exists, has spare concurrent-request
  ## slots, and is not rate limited for this request's endpoint.
  if session.isNil:
    return false
  result = session.pending <= maxConcurrentReqs and not session.isLimited(req)
proc invalidate*(session: var Session) =
  ## Remove a bad session from the pool and null the caller's reference.
  if session.isNil:
    return
  log "invalidating: ", session.pretty
  # linear-search removal; the original TODO notes this is not a full
  # solution but works for small pools
  let position = sessionPool.find(session)
  if position >= 0:
    sessionPool.delete(position)
  session = nil
proc release*(session: Session) =
  ## Return the concurrent-request slot acquired by getSession.
  if not session.isNil:
    dec session.pending
proc getSession*(req: ApiReq): Future[Session] {.async.} =
  ## Pick a random ready session from the pool, re-sampling up to
  ## pool-size times. On success the session's pending counter is
  ## incremented; callers must pair this with release(). Raises
  ## NoSessionsError when no ready session was drawn.
  for i in 0 ..< sessionPool.len:
    # first iteration always samples (result starts nil → not ready);
    # later iterations stop as soon as a ready session was drawn
    if result.isReady(req): break
    result = sessionPool.sample()

  if not result.isNil and result.isReady(req):
    inc result.pending
  else:
    log "no sessions available for API: ", req.cookie.endpoint
    raise noSessionsError()
proc setLimited*(session: Session; req: ApiReq) =
  ## Mark a session as rate limited for this endpoint; the flag is
  ## cleared by isLimited roughly an hour later.
  let api = req.endpoint(session)
  session.limited = true
  session.limitedAt = epochTime().int
  # guard the table lookup: the endpoint may have no RateLimit recorded
  # yet (e.g. when the limiting response carried no rate-limit headers);
  # the old unguarded `session.apis[api]` access raised a KeyError then
  let remaining =
    if api in session.apis: $session.apis[api].remaining
    else: "unknown"
  log "rate limited by api: ", api, ", reqs left: ", remaining, ", ", session.pretty
proc setRateLimit*(session: Session; req: ApiReq; remaining, reset, limit: int) =
  ## Record the rate-limit header triple for this session/endpoint,
  ## discarding updates from responses that raced each other.
  # avoid undefined behavior in race conditions
  let api = req.endpoint(session)
  if api in session.apis:
    let rateLimit = session.apis[api]
    # stale response: an equal-or-newer stored window already reports
    # fewer remaining requests than this update claims
    if rateLimit.reset >= reset and rateLimit.remaining < remaining:
      return
    # same window: only lower the remaining count, keep limit/reset
    if rateLimit.reset == reset and rateLimit.remaining >= remaining:
      session.apis[api].remaining = remaining
      return

  session.apis[api] = RateLimit(limit: limit, remaining: remaining, reset: reset)
proc initSessionPool*(cfg: Config; path: string) =
  ## Load account sessions from a JSONL file at startup. Exits the
  ## process when the file is missing or uses the unsupported .json
  ## extension; parseSession raises on malformed lines.
  enableLogging = cfg.enableDebug

  if path.endsWith(".json"):
    log "ERROR: .json is not supported, the file must be a valid JSONL file ending in .jsonl"
    quit 1

  if not fileExists(path):
    log "ERROR: ", path, " not found. This file is required to authenticate API requests."
    quit 1

  log "parsing JSONL account sessions file: ", path
  # one JSON session object per line
  for line in path.lines:
    sessionPool.add parseSession(line)
  log "successfully added ", sessionPool.len, " valid account sessions"

View File

@@ -13,8 +13,6 @@ proc get*[T](config: parseCfg.Config; section, key: string; default: T): T =
proc getConfig*(path: string): (Config, parseCfg.Config) = proc getConfig*(path: string): (Config, parseCfg.Config) =
var cfg = loadConfig(path) var cfg = loadConfig(path)
let masterRss = cfg.get("Config", "enableRSS", true)
let conf = Config( let conf = Config(
# Server # Server
address: cfg.get("Server", "address", "0.0.0.0"), address: cfg.get("Server", "address", "0.0.0.0"),
@@ -39,19 +37,10 @@ proc getConfig*(path: string): (Config, parseCfg.Config) =
hmacKey: cfg.get("Config", "hmacKey", "secretkey"), hmacKey: cfg.get("Config", "hmacKey", "secretkey"),
base64Media: cfg.get("Config", "base64Media", false), base64Media: cfg.get("Config", "base64Media", false),
minTokens: cfg.get("Config", "tokenCount", 10), minTokens: cfg.get("Config", "tokenCount", 10),
enableRSSUserTweets: masterRss and cfg.get("Config", "enableRSSUserTweets", true), enableRss: cfg.get("Config", "enableRSS", true),
enableRSSUserReplies: masterRss and cfg.get("Config", "enableRSSUserReplies", true),
enableRSSUserMedia: masterRss and cfg.get("Config", "enableRSSUserMedia", true),
enableRSSSearch: masterRss and cfg.get("Config", "enableRSSSearch", true),
enableRSSList: masterRss and cfg.get("Config", "enableRSSList", true),
enableDebug: cfg.get("Config", "enableDebug", false), enableDebug: cfg.get("Config", "enableDebug", false),
proxy: cfg.get("Config", "proxy", ""), proxy: cfg.get("Config", "proxy", ""),
proxyAuth: cfg.get("Config", "proxyAuth", ""), proxyAuth: cfg.get("Config", "proxyAuth", "")
apiProxy: cfg.get("Config", "apiProxy", ""),
disableTid: cfg.get("Config", "disableTid", false),
maxConcurrentReqs: cfg.get("Config", "maxConcurrentReqs", 2),
maxRetries: cfg.get("Config", "maxRetries", 1),
retryDelayMs: cfg.get("Config", "retryDelayMs", 150)
) )
return (conf, cfg) return (conf, cfg)

View File

@@ -1,175 +1,59 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import strutils import uri, sequtils
const const
consumerKey* = "3nVuSoBZnx6U4vzUxf5w" auth* = "Bearer AAAAAAAAAAAAAAAAAAAAAPYXBAAAAAAACLXUNDekMxqa8h%2F40K4moUkGsoc%3DTYfbDKbT3jJPCEVnMYqilB28NHfOPqkca3qaAxGfsyKCs0wRbw"
consumerSecret* = "Bcs59EFbbsdF6Sl9Ng71smgStWEGwXXKSjYvPVt7qys"
bearerToken* = "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA"
bearerToken2* = "Bearer AAAAAAAAAAAAAAAAAAAAAFXzAwAAAAAAMHCxpeSDG1gLNLghVe8d74hl6k4%3DRUMF4xAQLsbeBhTSRrCiQpJtxoGWeyHrDb5te2jpGskWDFW82F"
graphUser* = "-oaLodhGbbnzJBACb1kk2Q/UserByScreenName" api = parseUri("https://api.twitter.com")
graphUserV2* = "WEoGnYB0EG1yGwamDCF6zg/UserResultByScreenNameQuery" activate* = $(api / "1.1/guest/activate.json")
graphUserById* = "VN33vKXrPT7p35DgNR27aw/UserResultByIdQuery"
graphUserTweetsV2* = "6QdSuZ5feXxOadEdXa4XZg/UserWithProfileTweetsQueryV2"
graphUserTweetsAndRepliesV2* = "BDX77Xzqypdt11-mDfgdpQ/UserWithProfileTweetsAndRepliesQueryV2"
graphUserTweets* = "oRJs8SLCRNRbQzuZG93_oA/UserTweets"
graphUserTweetsAndReplies* = "kkaJ0Mf34PZVarrxzLihjg/UserTweetsAndReplies"
graphUserMedia* = "36oKqyQ7E_9CmtONGjJRsA/UserMedia"
graphUserMediaV2* = "bp0e_WdXqgNBIwlLukzyYA/MediaTimelineV2"
graphTweet* = "b4pV7sWOe97RncwHcGESUA/ConversationTimeline"
graphTweetDetail* = "YVyS4SfwYW7Uw5qwy0mQCA/TweetDetail"
graphTweetResult* = "nzme9KiYhfIOrrLrPP_XeQ/TweetResultByIdQuery"
graphTweetEditHistory* = "upS9teTSG45aljmP9oTuXA/TweetEditHistory"
graphSearchTimeline* = "bshMIjqDk8LTXTq4w91WKw/SearchTimeline"
graphListById* = "cIUpT1UjuGgl_oWiY7Snhg/ListByRestId"
graphListBySlug* = "K6wihoTiTrzNzSF8y1aeKQ/ListBySlug"
graphListMembers* = "fuVHh5-gFn8zDBBxb8wOMA/ListMembers"
graphListTweets* = "VQf8_XQynI3WzH6xopOMMQ/ListTimeline"
graphAboutAccount* = "zs_jFPFT78rBpXv9Z3U2YQ/AboutAccountQuery"
graphBroadcast* = "0nMmbMh-_JwwRRFNXkyH3Q/BroadcastQuery" userShow* = api / "1.1/users/show.json"
restLiveStream* = "1.1/live_video_stream/status/" photoRail* = api / "1.1/statuses/media_timeline.json"
status* = api / "1.1/statuses/show"
search* = api / "2/search/adaptive.json"
gqlFeatures* = """{ timelineApi = api / "2/timeline"
"android_ad_formats_media_component_render_overlay_enabled": false, timeline* = timelineApi / "profile"
"android_graphql_skip_api_media_color_palette": false, mediaTimeline* = timelineApi / "media"
"android_professional_link_spotlight_display_enabled": false, listTimeline* = timelineApi / "list.json"
"articles_api_enabled": false, tweet* = timelineApi / "conversation"
"articles_preview_enabled": true,
"blue_business_profile_image_shape_enabled": false,
"c9s_tweet_anatomy_moderator_badge_enabled": true,
"commerce_android_shop_module_enabled": false,
"communities_web_enable_tweet_community_results_fetch": true,
"creator_subscriptions_quote_tweet_preview_enabled": false,
"creator_subscriptions_subscription_count_enabled": false,
"creator_subscriptions_tweet_preview_api_enabled": true,
"freedom_of_speech_not_reach_fetch_enabled": true,
"graphql_is_translatable_rweb_tweet_is_translatable_enabled": true,
"grok_android_analyze_trend_fetch_enabled": false,
"grok_translations_community_note_auto_translation_is_enabled": false,
"grok_translations_community_note_translation_is_enabled": false,
"grok_translations_post_auto_translation_is_enabled": false,
"grok_translations_timeline_user_bio_auto_translation_is_enabled": false,
"hidden_profile_likes_enabled": false,
"highlights_tweets_tab_ui_enabled": false,
"immersive_video_status_linkable_timestamps": false,
"interactive_text_enabled": false,
"longform_notetweets_consumption_enabled": true,
"longform_notetweets_inline_media_enabled": true,
"longform_notetweets_richtext_consumption_enabled": true,
"longform_notetweets_rich_text_read_enabled": true,
"mobile_app_spotlight_module_enabled": false,
"payments_enabled": false,
"post_ctas_fetch_enabled": true,
"premium_content_api_read_enabled": false,
"profile_label_improvements_pcf_label_in_post_enabled": true,
"profile_label_improvements_pcf_label_in_profile_enabled": false,
"responsive_web_edit_tweet_api_enabled": true,
"responsive_web_enhance_cards_enabled": false,
"responsive_web_graphql_exclude_directive_enabled": true,
"responsive_web_graphql_skip_user_profile_image_extensions_enabled": false,
"responsive_web_graphql_timeline_navigation_enabled": true,
"responsive_web_grok_analysis_button_from_backend": true,
"responsive_web_grok_analyze_button_fetch_trends_enabled": false,
"responsive_web_grok_analyze_post_followups_enabled": true,
"responsive_web_grok_annotations_enabled": true,
"responsive_web_grok_community_note_auto_translation_is_enabled": false,
"responsive_web_grok_image_annotation_enabled": true,
"responsive_web_grok_imagine_annotation_enabled": true,
"responsive_web_grok_share_attachment_enabled": true,
"responsive_web_grok_show_grok_translated_post": false,
"responsive_web_jetfuel_frame": true,
"responsive_web_media_download_video_enabled": false,
"responsive_web_profile_redirect_enabled": false,
"responsive_web_text_conversations_enabled": false,
"responsive_web_twitter_article_notes_tab_enabled": false,
"responsive_web_twitter_article_tweet_consumption_enabled": true,
"responsive_web_twitter_blue_verified_badge_is_enabled": true,
"rweb_lists_timeline_redesign_enabled": true,
"rweb_tipjar_consumption_enabled": true,
"rweb_video_screen_enabled": false,
"rweb_video_timestamps_enabled": false,
"spaces_2022_h2_clipping": true,
"spaces_2022_h2_spaces_communities": true,
"standardized_nudges_misinfo": true,
"subscriptions_feature_can_gift_premium": false,
"subscriptions_verification_info_enabled": true,
"subscriptions_verification_info_is_identity_verified_enabled": false,
"subscriptions_verification_info_reason_enabled": true,
"subscriptions_verification_info_verified_since_enabled": true,
"super_follow_badge_privacy_enabled": false,
"super_follow_exclusive_tweet_notifications_enabled": false,
"super_follow_tweet_api_enabled": false,
"super_follow_user_api_enabled": false,
"tweet_awards_web_tipping_enabled": false,
"tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled": true,
"tweetypie_unmention_optimization_enabled": false,
"unified_cards_ad_metadata_container_dynamic_card_content_query_enabled": false,
"unified_cards_destination_url_params_enabled": false,
"verified_phone_label_enabled": false,
"vibe_api_enabled": false,
"view_counts_everywhere_api_enabled": true,
"hidden_profile_subscriptions_enabled": false
}""".replace(" ", "").replace("\n", "")
tweetVars* = """{ graphql = api / "graphql"
"postId": "$1", graphUser* = graphql / "I5nvpI91ljifos1Y3Lltyg/UserByRestId"
$2 graphList* = graphql / "JADTh6cjebfgetzvF3tQvQ/List"
"includeHasBirdwatchNotes": false, graphListBySlug* = graphql / "ErWsz9cObLel1BF-HjuBlA/ListBySlug"
"includePromotedContent": false, graphListMembers* = graphql / "Ke6urWMeCV2UlKXGRy4sow/ListMembers"
"withBirdwatchNotes": true,
"withVoice": false,
"withV2Timeline": true
}""".replace(" ", "").replace("\n", "")
tweetDetailVars* = """{ timelineParams* = {
"focalTweetId": "$1", "include_profile_interstitial_type": "0",
$2 "include_blocking": "0",
"referrer": "profile", "include_blocked_by": "0",
"with_rux_injections": false, "include_followed_by": "0",
"rankingMode": "Relevance", "include_want_retweets": "0",
"includePromotedContent": true, "include_mute_edge": "0",
"withCommunity": true, "include_can_dm": "0",
"withQuickPromoteEligibilityTweetFields": true, "include_can_media_tag": "1",
"withBirdwatchNotes": true, "skip_status": "1",
"withVoice": true "cards_platform": "Web-12",
}""".replace(" ", "").replace("\n", "") "include_cards": "1",
"include_composer_source": "false",
"include_reply_count": "1",
"tweet_mode": "extended",
"include_entities": "true",
"include_user_entities": "true",
"include_ext_media_color": "false",
"send_error_codes": "true",
"simple_quoted_tweet": "true",
"include_quote_count": "true"
}.toSeq
tweetEditHistoryVars* = """{ searchParams* = {
"tweetId": "$1", "query_source": "typed_query",
"withQuickPromoteEligibilityTweetFields": true "pc": "1",
}""".replace(" ", "").replace("\n", "") "spelling_corrections": "1"
}.toSeq
restIdVars* = """{ ## top: nothing
"rest_id": "$1", $2 ## latest: "tweet_search_mode: live"
"count": $3 ## user: "result_filter: user"
}""" ## photos: "result_filter: photos"
## videos: "result_filter: videos"
userMediaVars* = """{
"userId": "$1", $2
"count": $3,
"includePromotedContent": false,
"withClientEventToken": false,
"withBirdwatchNotes": false,
"withVoice": true
}""".replace(" ", "").replace("\n", "")
userTweetsVars* = """{
"userId": "$1", $2
"count": 20,
"includePromotedContent": false,
"withQuickPromoteEligibilityTweetFields": true,
"withVoice": true
}""".replace(" ", "").replace("\n", "")
userTweetsAndRepliesVars* = """{
"userId": "$1", $2
"count": 20,
"includePromotedContent": false,
"withCommunity": true,
"withVoice": true
}""".replace(" ", "").replace("\n", "")
userFieldToggles = """{"withPayments":false,"withAuxiliaryUserLabels":true}"""
userTweetsFieldToggles* = """{"withArticlePlainText":false}"""
tweetDetailFieldToggles* = """{"withArticleRichContentState":true,"withArticlePlainText":false,"withGrokAnalyze":false,"withDisallowedReplyControls":false}"""

View File

@@ -1,2 +1,2 @@
import parser/[user, graphql] import parser/[user, graphql, timeline]
export user, graphql export user, graphql, timeline

View File

@@ -1,53 +1,11 @@
import options, strutils
import jsony import jsony
import user, utils, ../types/[graphuser, graphlistmembers] import user, ../types/[graphuser, graphlistmembers]
from ../../types import User, VerifiedType, Result, Query, QueryKind from ../../types import User, Result, Query, QueryKind
proc parseUserResult*(userResult: UserResult): User =
result = userResult.legacy
if result.verifiedType == none and userResult.isBlueVerified:
result.verifiedType = blue
if result.username.len == 0 and userResult.core.screenName.len > 0:
result.id = userResult.restId
result.username = userResult.core.screenName
result.fullname = userResult.core.name
result.userPic = userResult.avatar.imageUrl.replace("_normal", "")
if userResult.privacy.isSome:
result.protected = userResult.privacy.get.protected
if userResult.location.isSome:
result.location = userResult.location.get.location
if userResult.core.createdAt.len > 0:
result.joinDate = parseTwitterDate(userResult.core.createdAt)
if userResult.verification.isSome:
let v = userResult.verification.get
if v.verifiedType != VerifiedType.none:
result.verifiedType = v.verifiedType
if userResult.profileBio.isSome and result.bio.len == 0:
result.bio = userResult.profileBio.get.description
proc parseGraphUser*(json: string): User = proc parseGraphUser*(json: string): User =
if json.len == 0 or json[0] != '{': let raw = json.fromJson(GraphUser)
return result = toUser raw.data.user.result.legacy
result.id = raw.data.user.result.restId
let
raw = json.fromJson(GraphUser)
userResult =
if raw.data.userResult.isSome: raw.data.userResult.get.result
elif raw.data.user.isSome: raw.data.user.get.result
else: UserResult()
if userResult.unavailableReason.get("") == "Suspended" or
userResult.reason.get("") == "Suspended":
return User(suspended: true)
result = parseUserResult(userResult)
proc parseGraphListMembers*(json, cursor: string): Result[User] = proc parseGraphListMembers*(json, cursor: string): Result[User] =
result = Result[User]( result = Result[User](
@@ -63,7 +21,7 @@ proc parseGraphListMembers*(json, cursor: string): Result[User] =
of TimelineTimelineItem: of TimelineTimelineItem:
let userResult = entry.content.itemContent.userResults.result let userResult = entry.content.itemContent.userResults.result
if userResult.restId.len > 0: if userResult.restId.len > 0:
result.content.add parseUserResult(userResult) result.content.add toUser userResult.legacy
of TimelineTimelineCursor: of TimelineTimelineCursor:
if entry.content.cursorType == "Bottom": if entry.content.cursorType == "Bottom":
result.bottom = entry.content.value result.bottom = entry.content.value

View File

@@ -0,0 +1,44 @@
import std/[json, strutils, times, math]
import utils
import ".."/types/[media, tweet]
from ../../types import Poll, Gif, Video, VideoVariant, VideoType
proc parseVideo*(entity: Entity): Video =
  ## Convert a raw media entity into a Video. The title from
  ## additionalMediaInfo, when present, takes precedence over the
  ## alt-text title.
  let info = entity.videoInfo
  result = Video(
    thumb: entity.mediaUrlHttps.getImageUrl,
    views: entity.ext.mediaStats{"r", "ok", "viewCount"}.getStr,
    available: entity.extMediaAvailability.status == "available",
    title: entity.extAltText,
    durationMs: info.durationMillis,
    description: entity.additionalMediaInfo.description,
    variants: info.variants
  )
  let mediaTitle = entity.additionalMediaInfo.title
  if mediaTitle.len > 0:
    result.title = mediaTitle
proc parseGif*(entity: Entity): Gif =
  ## Build a Gif from a media entity; the first video variant carries
  ## the playable url.
  let firstVariant = entity.videoInfo.variants[0]
  result = Gif(
    url: firstVariant.url.getImageUrl,
    thumb: entity.getImageUrl
  )
proc parsePoll*(card: Card): Poll =
  ## Parse a poll card's binding values into a Poll. The card name has
  ## the form "pollNchoice_*" where the 5th character N is the number
  ## of options.
  let vals = card.bindingValues
  # name format is pollNchoice_*
  for i in '1' .. card.name[4]:
    let choice = "choice" & i
    result.values.add parseInt(vals{choice & "_count", "string_value"}.getStr("0"))
    result.options.add vals{choice & "_label", "string_value"}.getStr

  let time = vals{"end_datetime_utc", "string_value"}.getStr.parseIsoDate
  if time > now():
    # $Duration yields e.g. "1 day, 2 hours, ..."; keep only the leading
    # unit. Single-unit durations contain no comma, and the old
    # `timeLeft[0 ..< timeLeft.find(",")]` then produced an empty status
    # (find returns -1) — fall back to the full string instead.
    let timeLeft = $(time - now())
    let comma = timeLeft.find(",")
    result.status = if comma > -1: timeLeft[0 ..< comma] else: timeLeft
  else:
    result.status = "Final results"

  result.leader = result.values.find(max(result.values))
  result.votes = result.values.sum

View File

@@ -1,30 +0,0 @@
import std/strutils
import jsony
import ../types/session
from ../../types import Session, SessionKind
proc parseSession*(raw: string): Session =
let session = raw.fromJson(RawSession)
let kind = if session.kind == "": "oauth" else: session.kind
case kind
of "oauth":
let id = session.oauthToken[0 ..< session.oauthToken.find('-')]
result = Session(
kind: SessionKind.oauth,
id: parseBiggestInt(id),
username: session.username,
oauthToken: session.oauthToken,
oauthSecret: session.oauthTokenSecret
)
of "cookie":
let id = if session.id.len > 0: parseBiggestInt(session.id) else: 0
result = Session(
kind: SessionKind.cookie,
id: id,
username: session.username,
authToken: session.authToken,
ct0: session.ct0
)
else:
raise newException(ValueError, "Unknown session kind: " & kind)

View File

@@ -1,15 +1,14 @@
import std/[macros, htmlgen, unicode] import std/[macros, htmlgen, unicode]
import ../types/common
import ".."/../[formatters, utils] import ".."/../[formatters, utils]
type type
ReplaceSliceKind = enum ReplaceSliceKind* = enum
rkRemove, rkUrl, rkHashtag, rkMention rkRemove, rkUrl, rkHashtag, rkMention
ReplaceSlice* = object ReplaceSlice* = object
slice: Slice[int] slice*: Slice[int]
kind: ReplaceSliceKind kind*: ReplaceSliceKind
url, display: string url*, display*: string
proc cmp*(x, y: ReplaceSlice): int = cmp(x.slice.a, y.slice.b) proc cmp*(x, y: ReplaceSlice): int = cmp(x.slice.a, y.slice.b)
@@ -27,11 +26,14 @@ proc dedupSlices*(s: var seq[ReplaceSlice]) =
inc j inc j
inc i inc i
proc extractUrls*(result: var seq[ReplaceSlice]; url: Url; proc extractHashtags*(result: var seq[ReplaceSlice]; slice: Slice[int]) =
textLen: int; hideTwitter = false) = result.add ReplaceSlice(kind: rkHashtag, slice: slice)
proc extractUrls*[T](result: var seq[ReplaceSlice]; entity: T;
textLen: int; hideTwitter = false) =
let let
link = url.expandedUrl link = entity.expandedUrl
slice = url.indices[0] ..< url.indices[1] slice = entity.indices
if hideTwitter and slice.b.succ >= textLen and link.isTwitterUrl: if hideTwitter and slice.b.succ >= textLen and link.isTwitterUrl:
if slice.a < textLen: if slice.a < textLen:
@@ -54,7 +56,7 @@ proc replacedWith*(runes: seq[Rune]; repls: openArray[ReplaceSlice];
let let
name = $runes[rep.slice.a.succ .. rep.slice.b] name = $runes[rep.slice.a.succ .. rep.slice.b]
symbol = $runes[rep.slice.a] symbol = $runes[rep.slice.a]
result.add a(symbol & name, href = "/search?f=tweets&q=%23" & name) result.add a(symbol & name, href = "/search?q=%23" & name)
of rkMention: of rkMention:
result.add a($runes[rep.slice], href = rep.url, title = rep.display) result.add a($runes[rep.slice], href = rep.url, title = rep.display)
of rkUrl: of rkUrl:

View File

@@ -1,8 +0,0 @@
import jsony
import ../types/tid
export TidPair
proc parseTidPairs*(raw: string): seq[TidPair] =
result = raw.fromJson(seq[TidPair])
if result.len == 0:
raise newException(ValueError, "Parsing pairs failed: " & raw)

View File

@@ -0,0 +1,84 @@
import std/[strutils, tables, options]
import jsony
import user, tweet, utils, ../types/timeline
from ../../types import Result, User, Tweet
proc parseHook(s: string; i: var int; v: var Slice[int]) =
var slice: array[2, int]
parseHook(s, i, slice)
v = slice[0] ..< slice[1]
proc getId(id: string): string {.inline.} =
let start = id.rfind("-")
if start < 0: return id
id[start + 1 ..< id.len]
proc processTweet(id: string; objects: GlobalObjects;
userCache: var Table[string, User]): Tweet =
let raw = objects.tweets[id]
result = toTweet raw
let uid = result.user.id
if uid.len > 0 and uid in objects.users:
if uid notin userCache:
userCache[uid] = toUser objects.users[uid]
result.user = userCache[uid]
let rtId = raw.retweetedStatusIdStr
if rtId.len > 0:
if rtId in objects.tweets:
result.retweet = some processTweet(rtId, objects, userCache)
else:
result.retweet = some Tweet(id: rtId.toId)
let qId = raw.quotedStatusIdStr
if qId.len > 0:
if qId in objects.tweets:
result.quote = some processTweet(qId, objects, userCache)
else:
result.quote = some Tweet(id: qId.toId)
proc parseCursor[T](e: Entry; result: var Result[T]) =
let cursor = e.content.operation.cursor
if cursor.cursorType == "Top":
result.top = cursor.value
elif cursor.cursorType == "Bottom":
result.bottom = cursor.value
proc parseUsers*(json: string; after=""): Result[User] =
result = Result[User](beginning: after.len == 0)
let raw = json.fromJson(Search)
if raw.timeline.instructions.len == 0:
return
for e in raw.timeline.instructions[0].addEntries.entries:
let
eId = e.entryId
id = eId.getId
if eId.startsWith("user") or eId.startsWith("sq-U"):
if id in raw.globalObjects.users:
result.content.add toUser raw.globalObjects.users[id]
elif eId.startsWith("cursor") or eId.startsWith("sq-C"):
parseCursor(e, result)
proc parseTweets*(json: string; after=""): Result[Tweet] =
result = Result[Tweet](beginning: after.len == 0)
let raw = json.fromJson(Search)
if raw.timeline.instructions.len == 0:
return
var userCache: Table[string, User]
for e in raw.timeline.instructions[0].addEntries.entries:
let
eId = e.entryId
id = eId.getId
if eId.startsWith("tweet") or eId.startsWith("sq-I-t"):
if id in raw.globalObjects.tweets:
result.content.add processTweet(id, raw.globalObjects, userCache)
elif eId.startsWith("cursor") or eId.startsWith("sq-C"):
parseCursor(e, result)

View File

@@ -0,0 +1,97 @@
import std/[strutils, options, algorithm, json]
import std/unicode except strip
import utils, slices, media, user
import ../types/tweet
from ../types/media as mediaTypes import MediaType
from ../../types import Tweet, User, TweetStats
proc expandTweetEntities(tweet: var Tweet; raw: RawTweet) =
let
orig = raw.fullText.toRunes
textRange = raw.displayTextRange
textSlice = textRange[0] .. textRange[1]
hasCard = raw.card.isSome
var replyTo = ""
if tweet.replyId > 0:
tweet.reply.add raw.inReplyToScreenName
replyTo = raw.inReplyToScreenName
var replacements = newSeq[ReplaceSlice]()
for u in raw.entities.urls:
if u.url.len == 0 or u.url notin raw.fullText:
continue
replacements.extractUrls(u, textSlice.b, hideTwitter=raw.isQuoteStatus)
# if hasCard and u.url == get(tweet.card).url:
# get(tweet.card).url = u.expandedUrl
for m in raw.entities.media:
replacements.extractUrls(m, textSlice.b, hideTwitter=true)
for hashtag in raw.entities.hashtags:
replacements.extractHashtags(hashtag.indices)
for symbol in raw.entities.symbols:
replacements.extractHashtags(symbol.indices)
for mention in raw.entities.userMentions:
let
name = mention.screenName
idx = tweet.reply.find(name)
if mention.indices.a >= textSlice.a:
replacements.add ReplaceSlice(kind: rkMention, slice: mention.indices,
url: "/" & name, display: mention.name)
if idx > -1 and name != replyTo:
tweet.reply.delete idx
elif idx == -1 and tweet.replyId != 0:
tweet.reply.add name
replacements.dedupSlices
replacements.sort(cmp)
tweet.text = orig.replacedWith(replacements, textSlice)
.strip(leading=false)
proc toTweet*(raw: RawTweet): Tweet =
result = Tweet(
id: raw.idStr.toId,
threadId: raw.conversationIdStr.toId,
replyId: raw.inReplyToStatusIdStr.toId,
time: parseTwitterDate(raw.createdAt),
hasThread: raw.selfThread.idStr.len > 0,
available: true,
user: User(id: raw.userIdStr),
stats: TweetStats(
replies: raw.replyCount,
retweets: raw.retweetCount,
likes: raw.favoriteCount,
quotes: raw.quoteCount
)
)
result.expandTweetEntities(raw)
if raw.card.isSome:
let card = raw.card.get
if "poll" in card.name:
result.poll = some parsePoll(card)
if "image" in card.name:
result.photos.add card.bindingValues{"image_large", "image_value", "url"}
.getStr.getImageUrl
# elif card.name == "amplify":
# discard
# # result.video = some(parsePromoVideo(jsCard{"binding_values"}))
# else:
# result.card = some parseCard(card, raw.entities.urls)
for m in raw.extendedEntities.media:
case m.kind
of photo: result.photos.add m.getImageUrl
of video:
result.video = some parseVideo(m)
if m.additionalMediaInfo.sourceUser.isSome:
result.attribution = some toUser get(m.additionalMediaInfo.sourceUser)
of animatedGif: result.gif = some parseGif(m)

View File

@@ -1,12 +1,8 @@
import std/[options, tables, strutils, strformat, sugar] import std/[options, tables, strformat]
import jsony import jsony
import user, ../types/unifiedcard import utils
import ../../formatters import ".."/types/[unifiedcard, media]
from ../../types import Card, CardKind, Video from ../../types import Card, CardKind, Video
from ../../utils import twimg, https
proc getImageUrl(entity: MediaEntity): string =
entity.mediaUrlHttps.dup(removePrefix(twimg), removePrefix(https))
proc parseDestination(id: string; card: UnifiedCard; result: var Card) = proc parseDestination(id: string; card: UnifiedCard; result: var Card) =
let destination = card.destinationObjects[id].data let destination = card.destinationObjects[id].data
@@ -28,14 +24,6 @@ proc parseMediaDetails(data: ComponentData; card: UnifiedCard; result: var Card)
result.text = data.topicDetail.title result.text = data.topicDetail.title
result.dest = "Topic" result.dest = "Topic"
proc parseJobDetails(data: ComponentData; card: UnifiedCard; result: var Card) =
data.destination.parseDestination(card, result)
result.kind = CardKind.jobDetails
result.title = data.title
result.text = data.shortDescriptionText
result.dest = &"@{data.profileUser.username} · {data.location}"
proc parseAppDetails(data: ComponentData; card: UnifiedCard; result: var Card) = proc parseAppDetails(data: ComponentData; card: UnifiedCard; result: var Card) =
let app = card.appStoreData[data.appId][0] let app = card.appStoreData[data.appId][0]
@@ -75,20 +63,7 @@ proc parseMedia(component: Component; card: UnifiedCard; result: var Card) =
durationMs: videoInfo.durationMillis, durationMs: videoInfo.durationMillis,
variants: videoInfo.variants variants: videoInfo.variants
) )
of model3d: of animatedGif: discard
result.title = "Unsupported 3D model ad"
proc parseGrokShare(data: ComponentData; card: UnifiedCard; result: var Card) =
result.kind = summaryLarge
data.destination.parseDestination(card, result)
result.dest = "Answer by Grok"
for msg in data.conversationPreview:
if msg.sender == "USER":
result.title = msg.message.shorten(70)
elif msg.sender == "AGENT":
result.text = msg.message.shorten(500)
proc parseUnifiedCard*(json: string): Card = proc parseUnifiedCard*(json: string): Card =
let card = json.fromJson(UnifiedCard) let card = json.fromJson(UnifiedCard)
@@ -101,18 +76,10 @@ proc parseUnifiedCard*(json: string): Card =
component.data.parseAppDetails(card, result) component.data.parseAppDetails(card, result)
of mediaWithDetailsHorizontal: of mediaWithDetailsHorizontal:
component.data.parseMediaDetails(card, result) component.data.parseMediaDetails(card, result)
of media, swipeableMedia: of ComponentType.media, swipeableMedia:
component.parseMedia(card, result) component.parseMedia(card, result)
of buttonGroup: of buttonGroup:
discard discard
of grokShare:
component.data.parseGrokShare(card, result)
of ComponentType.jobDetails:
component.data.parseJobDetails(card, result)
of ComponentType.hidden:
result.kind = CardKind.hidden
of ComponentType.unknown:
echo "ERROR: Unknown component type: ", json
case component.kind case component.kind
of twitterListDetails: of twitterListDetails:

View File

@@ -1,15 +1,15 @@
import std/[algorithm, unicode, re, strutils, strformat, options, nre] import std/[algorithm, unicode, re, strutils, strformat, options]
import jsony import jsony
import utils, slices import utils, slices
import ../types/user as userType import ../types/user as userType
from ../../types import Result, User, Error from ../../types import User, Error
let let
unRegex = re.re"(^|[^A-z0-9-_./?])@([A-z0-9_]{1,15})" unRegex = re"(^|[^A-z0-9-_./?])@([A-z0-9_]{1,15})"
unReplace = "$1<a href=\"/$2\">@$2</a>" unReplace = "$1<a href=\"/$2\">@$2</a>"
htRegex = nre.re"""(*U)(^|[^\w-_.?])([#$])([\w_]*+)(?!</a>|">|#)""" htRegex = re"(^|[^\w-_./?])([#$])([\w_]+)"
htReplace = "$1<a href=\"/search?f=tweets&q=%23$3\">$2$3</a>" htReplace = "$1<a href=\"/search?q=%23$3\">$2$3</a>"
proc expandUserEntities(user: var User; raw: RawUser) = proc expandUserEntities(user: var User; raw: RawUser) =
let let
@@ -29,7 +29,7 @@ proc expandUserEntities(user: var User; raw: RawUser) =
user.bio = orig.replacedWith(replacements, 0 .. orig.len) user.bio = orig.replacedWith(replacements, 0 .. orig.len)
.replacef(unRegex, unReplace) .replacef(unRegex, unReplace)
.replace(htRegex, htReplace) .replacef(htRegex, htReplace)
proc getBanner(user: RawUser): string = proc getBanner(user: RawUser): string =
if user.profileBannerUrl.len > 0: if user.profileBannerUrl.len > 0:
@@ -56,21 +56,23 @@ proc toUser*(raw: RawUser): User =
tweets: raw.statusesCount, tweets: raw.statusesCount,
likes: raw.favouritesCount, likes: raw.favouritesCount,
media: raw.mediaCount, media: raw.mediaCount,
verifiedType: raw.verifiedType, verified: raw.verified,
protected: raw.protected, protected: raw.protected,
joinDate: parseTwitterDate(raw.createdAt),
banner: getBanner(raw), banner: getBanner(raw),
userPic: getImageUrl(raw.profileImageUrlHttps).replace("_normal", "") userPic: getImageUrl(raw.profileImageUrlHttps).replace("_normal", "")
) )
if raw.createdAt.len > 0:
result.joinDate = parseTwitterDate(raw.createdAt)
if raw.pinnedTweetIdsStr.len > 0: if raw.pinnedTweetIdsStr.len > 0:
result.pinnedTweet = parseBiggestInt(raw.pinnedTweetIdsStr[0]) result.pinnedTweet = parseBiggestInt(raw.pinnedTweetIdsStr[0])
result.expandUserEntities(raw) result.expandUserEntities(raw)
proc parseHook*(s: string; i: var int; v: var User) = proc parseUser*(json: string; username=""): User =
var u: RawUser handleErrors:
parseHook(s, i, u) case error.code
v = toUser u of suspended: return User(username: username, suspended: true)
of userNotFound: return
else: echo "[error - parseUser]: ", error
result = toUser json.fromJson(RawUser)

View File

@@ -1,12 +1,16 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import std/[sugar, strutils, times] import std/[sugar, strutils, times]
import ../types/common import ".."/types/[common, media, tweet]
import ../../utils as uutils import ../../utils as uutils
template parseTime(time: string; f: static string; flen: int): DateTime = template parseTime(time: string; f: static string; flen: int): DateTime =
if time.len != flen: return if time.len != flen: return
parse(time, f, utc()) parse(time, f, utc())
proc toId*(id: string): int64 =
if id.len == 0: 0'i64
else: parseBiggestInt(id)
proc parseIsoDate*(date: string): DateTime = proc parseIsoDate*(date: string): DateTime =
date.parseTime("yyyy-MM-dd\'T\'HH:mm:ss\'Z\'", 20) date.parseTime("yyyy-MM-dd\'T\'HH:mm:ss\'Z\'", 20)
@@ -16,6 +20,9 @@ proc parseTwitterDate*(date: string): DateTime =
proc getImageUrl*(url: string): string = proc getImageUrl*(url: string): string =
url.dup(removePrefix(twimg), removePrefix(https)) url.dup(removePrefix(twimg), removePrefix(https))
proc getImageUrl*(entity: MediaEntity | Entity): string =
entity.mediaUrlHttps.getImageUrl
template handleErrors*(body) = template handleErrors*(body) =
if json.startsWith("{\"errors"): if json.startsWith("{\"errors"):
for error {.inject.} in json.fromJson(Errors).errors: for error {.inject.} in json.fromJson(Errors).errors:

View File

@@ -1,3 +1,4 @@
import jsony
from ../../types import Error from ../../types import Error
type type
@@ -5,7 +6,7 @@ type
url*: string url*: string
expandedUrl*: string expandedUrl*: string
displayUrl*: string displayUrl*: string
indices*: array[2, int] indices*: Slice[int]
ErrorObj* = object ErrorObj* = object
code*: Error code*: Error
@@ -18,3 +19,8 @@ proc contains*(codes: set[Error]; errors: Errors): bool =
for e in errors.errors: for e in errors.errors:
if e.code in codes: if e.code in codes:
return true return true
proc parseHook*(s: string; i: var int; v: var Slice[int]) =
var slice: array[2, int]
parseHook(s, i, slice)
v = slice[0] ..< slice[1]

View File

@@ -1,48 +1,12 @@
import options, strutils import user
from ../../types import User, VerifiedType
type type
GraphUser* = object GraphUser* = object
data*: tuple[userResult: Option[UserData], user: Option[UserData]] data*: tuple[user: UserData]
UserData* = object UserData* = object
result*: UserResult result*: UserResult
UserCore* = object UserResult = object
name*: string legacy*: RawUser
screenName*: string
createdAt*: string
UserBio* = object
description*: string
UserAvatar* = object
imageUrl*: string
Verification* = object
verifiedType*: VerifiedType
Location* = object
location*: string
Privacy* = object
protected*: bool
UserResult* = object
legacy*: User
restId*: string restId*: string
isBlueVerified*: bool
core*: UserCore
avatar*: UserAvatar
unavailableReason*: Option[string]
reason*: Option[string]
privacy*: Option[Privacy]
profileBio*: Option[UserBio]
verification*: Option[Verification]
location*: Option[Location]
proc enumHook*(s: string; v: var VerifiedType) =
v = try:
parseEnum[VerifiedType](s)
except:
VerifiedType.none

View File

@@ -0,0 +1,15 @@
import options
from ../../types import VideoType, VideoVariant
type
MediaType* = enum
photo, video, animatedGif
MediaEntity* = object
kind*: MediaType
mediaUrlHttps*: string
videoInfo*: Option[VideoInfo]
VideoInfo* = object
durationMillis*: int
variants*: seq[VideoVariant]

View File

@@ -1,9 +0,0 @@
type
RawSession* = object
kind*: string
id*: string
username*: string
oauthToken*: string
oauthTokenSecret*: string
authToken*: string
ct0*: string

View File

@@ -1,4 +0,0 @@
type
TidPair* = object
animationKey*: string
verification*: string

View File

@@ -0,0 +1,28 @@
import std/tables
import user, tweet
type
Search* = object
globalObjects*: GlobalObjects
timeline*: Timeline
GlobalObjects* = object
users*: Table[string, RawUser]
tweets*: Table[string, RawTweet]
Timeline = object
instructions*: seq[Instructions]
Instructions = object
addEntries*: tuple[entries: seq[Entry]]
Entry* = object
entryId*: string
content*: tuple[operation: Operation]
Operation = object
cursor*: tuple[value, cursorType: string]
proc renameHook*(v: var Entity; fieldName: var string) =
if fieldName == "type":
fieldName = "kind"

View File

@@ -0,0 +1,85 @@
import options
import jsony
from json import JsonNode
import user, media, common
type
RawTweet* = object
createdAt*: string
idStr*: string
fullText*: string
displayTextRange*: array[2, int]
entities*: Entities
extendedEntities*: ExtendedEntities
inReplyToStatusIdStr*: string
inReplyToScreenName*: string
userIdStr*: string
isQuoteStatus*: bool
replyCount*: int
retweetCount*: int
favoriteCount*: int
quoteCount*: int
conversationIdStr*: string
favorited*: bool
retweeted*: bool
selfThread*: tuple[idStr: string]
card*: Option[Card]
quotedStatusIdStr*: string
retweetedStatusIdStr*: string
Card* = object
name*: string
url*: string
bindingValues*: JsonNode
Entities* = object
hashtags*: seq[Hashtag]
symbols*: seq[Hashtag]
userMentions*: seq[UserMention]
urls*: seq[Url]
media*: seq[Entity]
Hashtag* = object
indices*: Slice[int]
UserMention* = object
screenName*: string
name*: string
indices*: Slice[int]
ExtendedEntities* = object
media*: seq[Entity]
Entity* = object
kind*: MediaType
indices*: Slice[int]
mediaUrlHttps*: string
url*: string
expandedUrl*: string
videoInfo*: VideoInfo
ext*: Ext
extMediaAvailability*: tuple[status: string]
extAltText*: string
additionalMediaInfo*: AdditionalMediaInfo
sourceStatusIdStr*: string
sourceUserIdStr*: string
AdditionalMediaInfo* = object
sourceUser*: Option[RawUser]
title*: string
description*: string
Ext* = object
mediaStats*: JsonNode
MediaStats* = object
ok*: tuple[viewCount: string]
proc renameHook*(v: var Entity; fieldName: var string) =
if fieldName == "type":
fieldName = "kind"
proc parseHook*(s: string; i: var int; v: var Slice[int]) =
var slice: array[2, int]
parseHook(s, i, slice)
v = slice[0] ..< slice[1]

View File

@@ -1,10 +1,7 @@
import std/[options, tables, times] import options, tables
import jsony import media as mediaTypes
from ../../types import VideoType, VideoVariant, User
type type
Text* = distinct string
UnifiedCard* = object UnifiedCard* = object
componentObjects*: Table[string, Component] componentObjects*: Table[string, Component]
destinationObjects*: Table[string, Destination] destinationObjects*: Table[string, Destination]
@@ -16,14 +13,10 @@ type
media media
swipeableMedia swipeableMedia
buttonGroup buttonGroup
jobDetails
appStoreDetails appStoreDetails
twitterListDetails twitterListDetails
communityDetails communityDetails
mediaWithDetailsHorizontal mediaWithDetailsHorizontal
hidden
grokShare
unknown
Component* = object Component* = object
kind*: ComponentType kind*: ComponentType
@@ -34,40 +27,24 @@ type
appId*: string appId*: string
mediaId*: string mediaId*: string
destination*: string destination*: string
location*: string
title*: Text title*: Text
subtitle*: Text subtitle*: Text
name*: Text name*: Text
memberCount*: int memberCount*: int
mediaList*: seq[MediaItem] mediaList*: seq[MediaItem]
topicDetail*: tuple[title: Text] topicDetail*: tuple[title: Text]
profileUser*: User
shortDescriptionText*: string
conversationPreview*: seq[GrokConversation]
MediaItem* = object MediaItem* = object
id*: string id*: string
destination*: string destination*: string
Destination* = object
kind*: string
data*: tuple[urlData: UrlData]
UrlData* = object UrlData* = object
url*: string url*: string
vanity*: string vanity*: string
MediaType* = enum Destination* = object
photo, video, model3d kind*: string
data*: tuple[urlData: UrlData]
MediaEntity* = object
kind*: MediaType
mediaUrlHttps*: string
videoInfo*: Option[VideoInfo]
VideoInfo* = object
durationMillis*: int
variants*: seq[VideoVariant]
AppType* = enum AppType* = enum
androidApp, iPhoneApp, iPadApp androidApp, iPhoneApp, iPadApp
@@ -78,58 +55,13 @@ type
title*: Text title*: Text
category*: Text category*: Text
GrokConversation* = object Text = object
message*: string content: string
sender*: string
TypeField = Component | Destination | MediaEntity | AppStoreData HasTypeField = Component | Destination | MediaEntity | AppStoreData
converter fromText*(text: Text): string = string(text) converter fromText*(text: Text): string = text.content
proc renameHook*(v: var TypeField; fieldName: var string) = proc renameHook*(v: var HasTypeField; fieldName: var string) =
if fieldName == "type": if fieldName == "type":
fieldName = "kind" fieldName = "kind"
proc enumHook*(s: string; v: var ComponentType) =
v = case s
of "details": details
of "media": media
of "swipeable_media": swipeableMedia
of "button_group": buttonGroup
of "job_details": jobDetails
of "app_store_details": appStoreDetails
of "twitter_list_details": twitterListDetails
of "community_details": communityDetails
of "media_with_details_horizontal": mediaWithDetailsHorizontal
of "commerce_drop_details": hidden
of "grok_share": grokShare
else: echo "ERROR: Unknown enum value (ComponentType): ", s; unknown
proc enumHook*(s: string; v: var AppType) =
v = case s
of "android_app": androidApp
of "iphone_app": iPhoneApp
of "ipad_app": iPadApp
else: echo "ERROR: Unknown enum value (AppType): ", s; androidApp
proc enumHook*(s: string; v: var MediaType) =
v = case s
of "video": video
of "photo": photo
of "model3d": model3d
else: echo "ERROR: Unknown enum value (MediaType): ", s; photo
proc parseHook*(s: string; i: var int; v: var DateTime) =
var str: string
parseHook(s, i, str)
v = parse(str, "yyyy-MM-dd hh:mm:ss")
proc parseHook*(s: string; i: var int; v: var Text) =
if s[i] == '"':
var str: string
parseHook(s, i, str)
v = Text(str)
else:
var t: tuple[content: string]
parseHook(s, i, t)
v = Text(t.content)

View File

@@ -1,6 +1,6 @@
import options import options
import jsony
import common import common
from ../../types import VerifiedType
type type
RawUser* = object RawUser* = object
@@ -16,7 +16,7 @@ type
favouritesCount*: int favouritesCount*: int
statusesCount*: int statusesCount*: int
mediaCount*: int mediaCount*: int
verifiedType*: VerifiedType verified*: bool
protected*: bool protected*: bool
profileLinkColor*: string profileLinkColor*: string
profileBannerUrl*: string profileBannerUrl*: string
@@ -42,3 +42,8 @@ type
Color* = object Color* = object
red*, green*, blue*: int red*, green*, blue*: int
proc parseHook*(s: string; i: var int; v: var Slice[int]) =
var slice: array[2, int]
parseHook(s, i, slice)
v = slice[0] ..< slice[1]

View File

@@ -1,20 +1,19 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import strutils, strformat, times, uri, tables, xmltree, htmlparser, htmlgen, math import strutils, strformat, times, uri, tables, xmltree, htmlparser, htmlgen
import std/[enumerate, re] import std/[enumerate, re]
import types, utils, query import types, utils, query
const const
cards = "cards.twitter.com/cards" cards = "cards.twitter.com/cards"
tco = "https://t.co" tco = "https://t.co"
twitter = parseUri("https://x.com") twitter = parseUri("https://twitter.com")
let let
twRegex = re"(?<=(?<!\S)https:\/\/|(?<=\s))(www\.|mobile\.)?twitter\.com" twRegex = re"(?<=(?<!\S)https:\/\/|(?<=\s))(www\.|mobile\.)?twitter\.com"
twLinkRegex = re"""<a href="https:\/\/twitter.com([^"]+)">twitter\.com(\S+)</a>""" twLinkRegex = re"""<a href="https:\/\/twitter.com([^"]+)">twitter\.com(\S+)</a>"""
xRegex = re"(?<=(?<!\S)https:\/\/|(?<=\s))(www\.|mobile\.)?x\.com"
xLinkRegex = re"""<a href="https:\/\/x.com([^"]+)">x\.com(\S+)</a>"""
ytRegex = re(r"([A-z.]+\.)?youtu(be\.com|\.be)", {reStudy, reIgnoreCase}) ytRegex = re"([A-z.]+\.)?youtu(be\.com|\.be)"
igRegex = re"(www\.)?instagram\.com"
rdRegex = re"(?<![.b])((www|np|new|amp|old)\.)?reddit.com" rdRegex = re"(?<![.b])((www|np|new|amp|old)\.)?reddit.com"
rdShortRegex = re"(?<![.b])redd\.it\/" rdShortRegex = re"(?<![.b])redd\.it\/"
@@ -33,13 +32,10 @@ proc getUrlPrefix*(cfg: Config): string =
if cfg.useHttps: https & cfg.hostname if cfg.useHttps: https & cfg.hostname
else: "http://" & cfg.hostname else: "http://" & cfg.hostname
proc shorten*(text: string; length=28): string = proc shortLink*(text: string; length=28): string =
result = text result = text.replace(wwwRegex, "")
if result.len > length: if result.len > length:
result = result[0 ..< length] & "" result = result[0 ..< length] & ""
proc shortLink*(text: string; length=28): string =
result = text.replace(wwwRegex, "").shorten(length)
proc stripHtml*(text: string; shorten=false): string = proc stripHtml*(text: string; shorten=false): string =
var html = parseHtml(text) var html = parseHtml(text)
@@ -59,63 +55,43 @@ proc replaceUrls*(body: string; prefs: Prefs; absolute=""): string =
result = body result = body
if prefs.replaceYouTube.len > 0 and "youtu" in result: if prefs.replaceYouTube.len > 0 and "youtu" in result:
let youtubeHost = strip(prefs.replaceYouTube, chars={'/'}) result = result.replace(ytRegex, prefs.replaceYouTube)
result = result.replace(ytRegex, youtubeHost) if prefs.replaceYouTube in result:
result = result.replace("/c/", "/")
if prefs.replaceTwitter.len > 0: if prefs.replaceTwitter.len > 0 and ("twitter.com" in body or tco in body):
let twitterHost = strip(prefs.replaceTwitter, chars={'/'}) result = result.replace(tco, https & prefs.replaceTwitter & "/t.co")
if tco in result: result = result.replace(cards, prefs.replaceTwitter & "/cards")
result = result.replace(tco, https & twitterHost & "/t.co") result = result.replace(twRegex, prefs.replaceTwitter)
if "x.com" in result: result = result.replacef(twLinkRegex, a(
result = result.replace(xRegex, twitterHost) prefs.replaceTwitter & "$2", href = https & prefs.replaceTwitter & "$1"))
result = result.replacef(xLinkRegex, a(
twitterHost & "$2", href = https & twitterHost & "$1"))
if "twitter.com" in result:
result = result.replace(cards, twitterHost & "/cards")
result = result.replace(twRegex, twitterHost)
result = result.replacef(twLinkRegex, a(
twitterHost & "$2", href = https & twitterHost & "$1"))
if prefs.replaceReddit.len > 0 and ("reddit.com" in result or "redd.it" in result): if prefs.replaceReddit.len > 0 and ("reddit.com" in result or "redd.it" in result):
let redditHost = strip(prefs.replaceReddit, chars={'/'}) result = result.replace(rdShortRegex, prefs.replaceReddit & "/comments/")
result = result.replace(rdShortRegex, redditHost & "/comments/") result = result.replace(rdRegex, prefs.replaceReddit)
result = result.replace(rdRegex, redditHost) if prefs.replaceReddit in result and "/gallery/" in result:
if redditHost in result and "/gallery/" in result:
result = result.replace("/gallery/", "/comments/") result = result.replace("/gallery/", "/comments/")
if prefs.replaceInstagram.len > 0 and "instagram.com" in result:
result = result.replace(igRegex, prefs.replaceInstagram)
if absolute.len > 0 and "href" in result: if absolute.len > 0 and "href" in result:
result = result.replace("href=\"/", &"href=\"{absolute}/") result = result.replace("href=\"/", "href=\"" & absolute & "/")
proc getM3u8Url*(content: string): string = proc getM3u8Url*(content: string): string =
var matches: array[1, string] var matches: array[1, string]
if re.find(content, m3u8Regex, matches) != -1: if re.find(content, m3u8Regex, matches) != -1:
result = matches[0] result = matches[0]
proc proxifyVideo*(manifest: string; proxy: bool; manifestUrl = ""): string = proc proxifyVideo*(manifest: string; proxy: bool): string =
let (baseUrl, basePath) =
if manifestUrl.len > 0:
let
u = parseUri(manifestUrl)
origin = u.scheme & "://" & u.hostname
idx = manifestUrl.rfind('/')
dirPath = if idx > 8: manifestUrl[0 .. idx] else: ""
(origin, dirPath)
else:
("https://video.twimg.com", "")
var replacements: seq[(string, string)] var replacements: seq[(string, string)]
for line in manifest.splitLines: for line in manifest.splitLines:
let url = let url =
if line.startsWith("#EXT-X-MAP:URI"): line[16 .. ^2] if line.startsWith("#EXT-X-MAP:URI"): line[16 .. ^2]
elif line.startsWith("#EXT-X-MEDIA") and "URI=" in line:
line[line.find("URI=") + 5 .. -1 + line.find("\"", start= 5 + line.find("URI="))]
else: line else: line
let resolved = if url.startsWith('/'):
if url.startsWith('/'): baseUrl & url let path = "https://video.twimg.com" & url
elif basePath.len > 0 and url.len > 0 and not url.startsWith('#') and replacements.add (url, if proxy: path.getVidUrl else: path)
not url.startsWith("http") and ('.' in url): basePath & url
else: ""
if resolved.len > 0:
replacements.add (url, if proxy: resolved.getVidUrl else: resolved)
return manifest.multiReplace(replacements) return manifest.multiReplace(replacements)
proc getUserPic*(userPic: string; style=""): string = proc getUserPic*(userPic: string; style=""): string =
@@ -168,30 +144,13 @@ proc getShortTime*(tweet: Tweet): string =
else: else:
result = "now" result = "now"
proc getDuration*(ms: int): string =
let
sec = int(round(ms / 1000))
min = floorDiv(sec, 60)
hour = floorDiv(min, 60)
if hour > 0:
&"{hour}:{min mod 60:02}:{sec mod 60:02}"
else:
&"{min mod 60}:{sec mod 60:02}"
proc getDuration*(video: Video): string =
getDuration(video.durationMs)
proc getLink*(id: int64; username="i"; focus=true): string =
var username = username
if username.len == 0:
username = "i"
result = &"/{username}/status/{id}"
if focus: result &= "#m"
proc getLink*(tweet: Tweet; focus=true): string = proc getLink*(tweet: Tweet; focus=true): string =
if tweet.id == 0: return if tweet.id == 0: return
var username = tweet.user.username var username = tweet.user.username
return getLink(tweet.id, username, focus) if username.len == 0:
username = "i"
result = &"/{username}/status/{tweet.id}"
if focus: result &= "#m"
proc getTwitterLink*(path: string; params: Table[string, string]): string = proc getTwitterLink*(path: string; params: Table[string, string]): string =
var var
@@ -219,7 +178,7 @@ proc getTwitterLink*(path: string; params: Table[string, string]): string =
proc getLocation*(u: User | Tweet): (string, string) = proc getLocation*(u: User | Tweet): (string, string) =
if "://" in u.location: return (u.location, "") if "://" in u.location: return (u.location, "")
let loc = u.location.split(":") let loc = u.location.split(":")
let url = if loc.len > 1: "/search?f=tweets&q=place:" & loc[1] else: "" let url = if loc.len > 1: "/search?q=place:" & loc[1] else: ""
(loc[0], url) (loc[0], url)
proc getSuspended*(username: string): string = proc getSuspended*(username: string): string =

View File

@@ -39,11 +39,8 @@ template use*(pool: HttpPool; heads: HttpHeaders; body: untyped): untyped =
try: try:
body body
except BadClientError, ProtocolError: except ProtocolError:
# Twitter returned 503 or closed the connection, we need a new client # Twitter closed the connection, retry
pool.release(c, true)
badClient = false
c = pool.acquire(heads)
body body
finally: finally:
pool.release(c, badClient) pool.release(c, badClient)

View File

@@ -6,22 +6,17 @@ from os import getEnv
import jester import jester
import types, config, prefs, formatters, redis_cache, http_pool, auth, apiutils import types, config, prefs, formatters, redis_cache, http_pool, tokens
import views/[general, about] import views/[general, about]
import routes/[ import routes/[
preferences, timeline, status, media, search, rss, list, debug, preferences, timeline, status, media, search, rss, list, debug,
unsupported, embed, resolver, broadcast, router_utils] unsupported, embed, resolver, router_utils]
const instancesUrl = "https://github.com/zedeus/nitter/wiki/Instances" const instancesUrl = "https://github.com/zedeus/nitter/wiki/Instances"
const issuesUrl = "https://github.com/zedeus/nitter/issues" const issuesUrl = "https://github.com/zedeus/nitter/issues"
let let configPath = getEnv("NITTER_CONF_FILE", "./nitter.conf")
configPath = getEnv("NITTER_CONF_FILE", "./nitter.conf") let (cfg, fullCfg) = getConfig(configPath)
(cfg, fullCfg) = getConfig(configPath)
sessionsPath = getEnv("NITTER_SESSIONS_FILE", "./sessions.jsonl")
initSessionPool(cfg, sessionsPath)
if not cfg.enableDebug: if not cfg.enableDebug:
# Silence Jester's query warning # Silence Jester's query warning
@@ -37,17 +32,14 @@ setHmacKey(cfg.hmacKey)
setProxyEncoding(cfg.base64Media) setProxyEncoding(cfg.base64Media)
setMaxHttpConns(cfg.httpMaxConns) setMaxHttpConns(cfg.httpMaxConns)
setHttpProxy(cfg.proxy, cfg.proxyAuth) setHttpProxy(cfg.proxy, cfg.proxyAuth)
setApiProxy(cfg.apiProxy)
setDisableTid(cfg.disableTid)
setMaxConcurrentReqs(cfg.maxConcurrentReqs)
setMaxRetries(cfg.maxRetries)
setRetryDelayMs(cfg.retryDelayMs)
initAboutPage(cfg.staticDir) initAboutPage(cfg.staticDir)
waitFor initRedisPool(cfg) waitFor initRedisPool(cfg)
stdout.write &"Connected to Redis at {cfg.redisHost}:{cfg.redisPort}\n" stdout.write &"Connected to Redis at {cfg.redisHost}:{cfg.redisPort}\n"
stdout.flushFile stdout.flushFile
asyncCheck initTokenPool(cfg)
createUnsupportedRouter(cfg) createUnsupportedRouter(cfg)
createResolverRouter(cfg) createResolverRouter(cfg)
createPrefRouter(cfg) createPrefRouter(cfg)
@@ -58,26 +50,19 @@ createSearchRouter(cfg)
createMediaRouter(cfg) createMediaRouter(cfg)
createEmbedRouter(cfg) createEmbedRouter(cfg)
createRssRouter(cfg) createRssRouter(cfg)
createBroadcastRouter(cfg)
createDebugRouter(cfg) createDebugRouter(cfg)
settings: settings:
port = Port(cfg.port) port = Port(cfg.port)
staticDir = cfg.staticDir staticDir = cfg.staticDir
bindAddr = cfg.address bindAddr = cfg.address
reusePort = true
routes: routes:
before:
# skip all file URLs
cond "." notin request.path
applyUrlPrefs()
get "/": get "/":
resp renderMain(renderSearch(), request, cfg, requestPrefs()) resp renderMain(renderSearch(), request, cfg, themePrefs())
get "/about": get "/about":
resp renderMain(renderAbout(), request, cfg, requestPrefs()) resp renderMain(renderAbout(), request, cfg, themePrefs())
get "/explore": get "/explore":
redirect("/about") redirect("/about")
@@ -88,7 +73,7 @@ routes:
get "/i/redirect": get "/i/redirect":
let url = decodeUrl(@"url") let url = decodeUrl(@"url")
if url.len == 0: resp Http404 if url.len == 0: resp Http404
redirect(replaceUrls(url, requestPrefs())) redirect(replaceUrls(url, cookiePrefs()))
error Http404: error Http404:
resp Http404, showError("Page not found", cfg) resp Http404, showError("Page not found", cfg)
@@ -99,29 +84,20 @@ routes:
resp Http500, showError( resp Http500, showError(
&"An error occurred, please {link} with the URL you tried to visit.", cfg) &"An error occurred, please {link} with the URL you tried to visit.", cfg)
error BadClientError:
echo error.exc.name, ": ", error.exc.msg
resp Http500, showError("Network error occurred, please try again.", cfg)
error RateLimitError: error RateLimitError:
echo error.exc.name, ": ", error.exc.msg
const link = a("another instance", href = instancesUrl) const link = a("another instance", href = instancesUrl)
resp Http429, showError( resp Http429, showError(
&"Instance has been rate limited.<br>Use {link} or try again later.", cfg) &"Instance has been rate limited.<br>Use {link} or try again later.", cfg)
error NoSessionsError: extend unsupported, ""
const link = a("another instance", href = instancesUrl)
resp Http429, showError(
&"Instance has no auth tokens, or is fully rate limited.<br>Use {link} or try again later.", cfg)
extend rss, ""
extend status, ""
extend search, ""
extend timeline, ""
extend media, ""
extend list, ""
extend preferences, "" extend preferences, ""
extend resolver, "" extend resolver, ""
extend rss, ""
extend search, ""
extend timeline, ""
extend list, ""
extend status, ""
extend media, ""
extend embed, "" extend embed, ""
extend broadcastRoute, ""
extend debug, "" extend debug, ""
extend unsupported, ""

View File

@@ -1,21 +1,9 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import strutils, options, times, math, tables import strutils, options, tables, times, math
import packedjson, packedjson/deserialiser import packedjson, packedjson/deserialiser
import types, parserutils, utils import types, parserutils, utils
import experimental/parser/unifiedcard import experimental/parser/unifiedcard
proc parseGraphTweet(js: JsonNode): Tweet
proc parseVerifiedType(s: string; current: VerifiedType): VerifiedType =
try: parseEnum[VerifiedType](s)
except ValueError: current
proc parseCommunityNote(js: JsonNode): string =
let subtitle = js{"subtitle"}
result = subtitle{"text"}.getStr
with entities, subtitle{"entities"}:
result = expandBirdwatchEntities(result, entities)
proc parseUser(js: JsonNode; id=""): User = proc parseUser(js: JsonNode; id=""): User =
if js.isNull: return if js.isNull: return
result = User( result = User(
@@ -31,105 +19,13 @@ proc parseUser(js: JsonNode; id=""): User =
tweets: js{"statuses_count"}.getInt, tweets: js{"statuses_count"}.getInt,
likes: js{"favourites_count"}.getInt, likes: js{"favourites_count"}.getInt,
media: js{"media_count"}.getInt, media: js{"media_count"}.getInt,
protected: js{"protected"}.getBool(js{"privacy", "protected"}.getBool), verified: js{"verified"}.getBool,
protected: js{"protected"}.getBool,
joinDate: js{"created_at"}.getTime joinDate: js{"created_at"}.getTime
) )
if js{"is_blue_verified"}.getBool(false):
result.verifiedType = blue
with verifiedType, js{"verified_type"}:
result.verifiedType = parseVerifiedType(verifiedType.getStr, result.verifiedType)
result.expandUserEntities(js) result.expandUserEntities(js)
proc parseGraphUser(js: JsonNode): User =
var user = js{"user_result", "result"}
if user.isNull:
user = ? js{"user_results", "result"}
if user.isNull:
if js{"core"}.notNull and js{"legacy"}.notNull:
user = js
else:
return
result = parseUser(user{"legacy"}, user{"rest_id"}.getStr)
if result.verifiedType == none and user{"is_blue_verified"}.getBool(false):
result.verifiedType = blue
# fallback to support UserMedia/recent GraphQL updates
if result.username.len == 0:
result.username = user{"core", "screen_name"}.getStr
result.fullname = user{"core", "name"}.getStr
result.userPic = user{"avatar", "image_url"}.getImageStr.replace("_normal", "")
if user{"is_blue_verified"}.getBool(
user{"verification", "is_blue_verified"}.getBool(false)):
result.verifiedType = blue
with verifiedType, user{"verification", "verified_type"}:
result.verifiedType = parseVerifiedType(verifiedType.getStr, result.verifiedType)
proc parseAboutAccount*(js: JsonNode): AccountInfo =
if js.isNull: return
let user = ? js{"data", "user_result_by_screen_name", "result"}
if user{"unavailable_reason"}.getStr == "Suspended":
result.suspended = true
return
result = AccountInfo(
username: user{"core", "screen_name"}.getStr,
fullname: user{"core", "name"}.getStr,
joinDate: user{"core", "created_at"}.getTime,
userPic: user{"avatar", "image_url"}.getImageStr.replace("_normal", ""),
affiliateLabel: user{"identity_profile_labels_highlighted_label", "label", "description"}.getStr,
)
if user{"is_blue_verified"}.getBool(false):
result.verifiedType = blue
with verifiedType, user{"verification", "verified_type"}:
result.verifiedType = parseVerifiedType(verifiedType.getStr, result.verifiedType)
with about, user{"about_profile"}:
result.basedIn = about{"account_based_in"}.getStr
result.source = about{"source"}.getStr
result.affiliateUsername = about{"affiliate_username"}.getStr
try:
result.usernameChanges = about{"username_changes", "count"}.getStr("0").parseInt
except ValueError:
discard
with lastChange, about{"username_changes", "last_changed_at_msec"}:
result.lastUsernameChange = lastChange.getTimeFromMsStr
with info, user{"verification_info"}:
result.isIdentityVerified = info{"is_identity_verified"}.getBool
with reason, info{"reason"}:
result.overrideVerifiedYear = reason{"override_verified_year"}.getInt
with since, reason{"verified_since_msec"}:
result.verifiedSince = since.getTimeFromMsStr
proc parseBroadcastInfo*(js: JsonNode): Broadcast =
let bc = ? js{"data", "broadcast"}
result = Broadcast(
id: bc{"broadcast_id"}.getStr,
title: bc{"status"}.getStr,
state: bc{"state"}.getStr.toUpperAscii,
thumb: bc{"image_url"}.getStr,
mediaKey: bc{"media_key"}.getStr,
totalWatched: bc{"total_watched"}.getInt,
startTime: bc{"start_time"}.getTimeFromMs,
endTime: bc{"end_time"}.getTimeFromMs,
replayStart: bc{"edited_replay", "start_time"}.getInt,
availableForReplay: bc{"available_for_replay"}.getBool,
user: parseGraphUser(bc)
)
proc parseGraphList*(js: JsonNode): List = proc parseGraphList*(js: JsonNode): List =
if js.isNull: return if js.isNull: return
@@ -142,13 +38,14 @@ proc parseGraphList*(js: JsonNode): List =
result = List( result = List(
id: list{"id_str"}.getStr, id: list{"id_str"}.getStr,
name: list{"name"}.getStr, name: list{"name"}.getStr,
username: list{"user_results", "result", "legacy", "screen_name"}.getStr, username: list{"user", "legacy", "screen_name"}.getStr,
userId: list{"user_results", "result", "rest_id"}.getStr, userId: list{"user", "rest_id"}.getStr,
description: list{"description"}.getStr, description: list{"description"}.getStr,
members: list{"member_count"}.getInt, members: list{"member_count"}.getInt,
banner: list{"custom_banner_media", "media_info", "original_img_url"}.getImageStr banner: list{"custom_banner_media", "media_info", "url"}.getImageStr
) )
proc parsePoll(js: JsonNode): Poll = proc parsePoll(js: JsonNode): Poll =
let vals = js{"binding_values"} let vals = js{"binding_values"}
# name format is pollNchoice_* # name format is pollNchoice_*
@@ -167,124 +64,34 @@ proc parsePoll(js: JsonNode): Poll =
result.leader = result.values.find(max(result.values)) result.leader = result.values.find(max(result.values))
result.votes = result.values.sum result.votes = result.values.sum
proc parseVideoVariants(variants: JsonNode): seq[VideoVariant] = proc parseGif(js: JsonNode): Gif =
result = @[] result = Gif(
for v in variants: url: js{"video_info", "variants"}[0]{"url"}.getImageStr,
let thumb: js{"media_url_https"}.getImageStr
url = v{"url"}.getStr )
contentType = parseEnum[VideoType](v{"content_type"}.getStr("video/mp4"))
bitrate = v{"bit_rate"}.getInt(v{"bitrate"}.getInt(0))
result.add VideoVariant(
contentType: contentType,
bitrate: bitrate,
url: url,
resolution: if contentType == mp4: getMp4Resolution(url) else: 0
)
proc parseVideo(js: JsonNode): Video = proc parseVideo(js: JsonNode): Video =
result = Video( result = Video(
thumb: js{"media_url_https"}.getImageStr, thumb: js{"media_url_https"}.getImageStr,
available: true, views: js{"ext", "mediaStats", "r", "ok", "viewCount"}.getStr,
available: js{"ext_media_availability", "status"}.getStr == "available",
title: js{"ext_alt_text"}.getStr, title: js{"ext_alt_text"}.getStr,
durationMs: js{"video_info", "duration_millis"}.getInt durationMs: js{"video_info", "duration_millis"}.getInt
# playbackType: mp4 # playbackType: mp4
) )
with status, js{"ext_media_availability", "status"}:
if status.getStr.len > 0 and status.getStr.toLowerAscii != "available":
result.available = false
with title, js{"additional_media_info", "title"}: with title, js{"additional_media_info", "title"}:
result.title = title.getStr result.title = title.getStr
with description, js{"additional_media_info", "description"}: with description, js{"additional_media_info", "description"}:
result.description = description.getStr result.description = description.getStr
result.variants = parseVideoVariants(js{"video_info", "variants"}) for v in js{"video_info", "variants"}:
result.variants.add VideoVariant(
proc addMedia(media: var MediaEntities; photo: Photo) = contentType: parseEnum[VideoType](v{"content_type"}.getStr("summary")),
media.add Media(kind: photoMedia, photo: photo) bitrate: v{"bitrate"}.getInt,
url: v{"url"}.getStr
proc addMedia(media: var MediaEntities; video: Video) = )
media.add Media(kind: videoMedia, video: video)
proc addMedia(media: var MediaEntities; gif: Gif) =
media.add Media(kind: gifMedia, gif: gif)
proc parseLegacyMediaEntities(js: JsonNode; result: var Tweet) =
with jsMedia, js{"extended_entities", "media"}:
for m in jsMedia:
case m.getTypeName:
of "photo":
result.media.addMedia(Photo(
url: m{"media_url_https"}.getImageStr,
altText: m{"ext_alt_text"}.getStr
))
of "video":
result.media.addMedia(parseVideo(m))
with user, m{"additional_media_info", "source_user"}:
if user{"id"}.getInt > 0:
result.attribution = some(parseUser(user))
else:
result.attribution = some(parseGraphUser(user))
of "animated_gif":
result.media.addMedia(Gif(
url: m{"video_info", "variants"}[0]{"url"}.getImageStr,
thumb: m{"media_url_https"}.getImageStr,
altText: m{"ext_alt_text"}.getStr
))
else: discard
with url, m{"url"}:
if result.text.endsWith(url.getStr):
result.text.removeSuffix(url.getStr)
result.text = result.text.strip()
proc parseMediaEntities(js: JsonNode; result: var Tweet) =
with mediaEntities, js{"media_entities"}:
var parsedMedia: MediaEntities
for mediaEntity in mediaEntities:
with mediaInfo, mediaEntity{"media_results", "result", "media_info"}:
case mediaInfo.getTypeName
of "ApiImage":
parsedMedia.addMedia(Photo(
url: mediaInfo{"original_img_url"}.getImageStr,
altText: mediaInfo{"alt_text"}.getStr
))
of "ApiVideo":
let status = mediaEntity{"media_results", "result", "media_availability_v2", "status"}
parsedMedia.addMedia(Video(
available: status.getStr == "Available",
thumb: mediaInfo{"preview_image", "original_img_url"}.getImageStr,
title: mediaInfo{"alt_text"}.getStr,
durationMs: mediaInfo{"duration_millis"}.getInt,
variants: parseVideoVariants(mediaInfo{"variants"})
))
of "ApiGif":
parsedMedia.addMedia(Gif(
url: mediaInfo{"variants"}[0]{"url"}.getImageStr,
thumb: mediaInfo{"preview_image", "original_img_url"}.getImageStr,
altText: mediaInfo{"alt_text"}.getStr
))
else: discard
if "expanded_url" in mediaEntity:
let expandedUrl = js.getExpandedUrl
if result.text.endsWith(expandedUrl):
result.text.removeSuffix(expandedUrl)
result.text = result.text.strip()
if mediaEntities.len > 0 and parsedMedia.len == mediaEntities.len:
result.media = parsedMedia
# Remove media URLs from text
with mediaList, js{"legacy", "entities", "media"}:
for url in mediaList:
let expandedUrl = url.getExpandedUrl
if result.text.endsWith(expandedUrl):
result.text.removeSuffix(expandedUrl)
result.text = result.text.strip()
proc parsePromoVideo(js: JsonNode): Video = proc parsePromoVideo(js: JsonNode): Video =
result = Video( result = Video(
@@ -307,23 +114,14 @@ proc parsePromoVideo(js: JsonNode): Video =
result.variants.add variant result.variants.add variant
proc parseBroadcast(js: JsonNode): Card = proc parseBroadcast(js: JsonNode): Card =
let let image = js{"broadcast_thumbnail_large"}.getImageVal
image = js{"broadcast_thumbnail_large"}.getImageVal
broadcastUrl = js{"broadcast_url"}.getStrVal
broadcastId = broadcastUrl.rsplit('/', maxsplit=1)[^1]
streamUrl = "/i/broadcasts/" & broadcastId & "/stream"
result = Card( result = Card(
kind: broadcast, kind: broadcast,
url: "/i/broadcasts/" & broadcastId, url: js{"broadcast_url"}.getStrVal,
title: js{"broadcaster_display_name"}.getStrVal, title: js{"broadcaster_display_name"}.getStrVal,
text: js{"broadcast_title"}.getStrVal, text: js{"broadcast_title"}.getStrVal,
image: image, image: image,
video: some Video( video: some Video(thumb: image)
thumb: image,
available: true,
playbackType: m3u8,
variants: @[VideoVariant(contentType: m3u8, url: streamUrl)]
)
) )
proc parseCard(js: JsonNode; urls: JsonNode): Card = proc parseCard(js: JsonNode; urls: JsonNode): Card =
@@ -373,7 +171,7 @@ proc parseCard(js: JsonNode; urls: JsonNode): Card =
for u in ? urls: for u in ? urls:
if u{"url"}.getStr == result.url: if u{"url"}.getStr == result.url:
result.url = u.getExpandedUrl(result.url) result.url = u{"expanded_url"}.getStr
break break
if kind in {videoDirectMessage, imageDirectMessage}: if kind in {videoDirectMessage, imageDirectMessage}:
@@ -383,20 +181,14 @@ proc parseCard(js: JsonNode; urls: JsonNode): Card =
result.url.len == 0 or result.url.startsWith("card://"): result.url.len == 0 or result.url.startsWith("card://"):
result.url = getPicUrl(result.image) result.url = getPicUrl(result.image)
proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull(); proc parseTweet(js: JsonNode): Tweet =
replyId: int64 = 0): Tweet = if js.isNull: return
if js.isNull: return Tweet()
let time =
if js{"created_at"}.notNull: js{"created_at"}.getTime
else: js{"created_at_ms"}.getTimeFromMs
result = Tweet( result = Tweet(
id: js{"id_str"}.getId, id: js{"id_str"}.getId,
threadId: js{"conversation_id_str"}.getId, threadId: js{"conversation_id_str"}.getId,
replyId: js{"in_reply_to_status_id_str"}.getId, replyId: js{"in_reply_to_status_id_str"}.getId,
text: js{"full_text"}.getStr, text: js{"full_text"}.getStr,
time: time, time: js{"created_at"}.getTime,
hasThread: js{"self_thread"}.notNull, hasThread: js{"self_thread"}.notNull,
available: true, available: true,
user: User(id: js{"user_id_str"}.getStr), user: User(id: js{"user_id_str"}.getStr),
@@ -404,56 +196,43 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull();
replies: js{"reply_count"}.getInt, replies: js{"reply_count"}.getInt,
retweets: js{"retweet_count"}.getInt, retweets: js{"retweet_count"}.getInt,
likes: js{"favorite_count"}.getInt, likes: js{"favorite_count"}.getInt,
views: js{"views_count"}.getInt quotes: js{"quote_count"}.getInt
) )
) )
if result.replyId == 0: result.expandTweetEntities(js)
result.replyId = replyId
# fix for pinned threads if js{"is_quote_status"}.getBool:
if result.hasThread and result.threadId == 0:
result.threadId = js{"self_thread", "id_str"}.getId
if "retweeted_status" in js:
result.retweet = some Tweet()
elif js{"is_quote_status"}.getBool:
result.quote = some Tweet(id: js{"quoted_status_id_str"}.getId) result.quote = some Tweet(id: js{"quoted_status_id_str"}.getId)
# legacy
with rt, js{"retweeted_status_id_str"}: with rt, js{"retweeted_status_id_str"}:
result.retweet = some Tweet(id: rt.getId) result.retweet = some Tweet(id: rt.getId)
return return
# graphql with jsCard, js{"card"}:
with rt, js{"retweeted_status_result", "result"}:
# needed due to weird edgecase where the actual tweet data isn't included
if "legacy" in rt:
result.retweet = some parseGraphTweet(rt)
return
with reposts, js{"repostedStatusResults"}:
with rt, reposts{"result"}:
if "legacy" in rt:
result.retweet = some parseGraphTweet(rt)
return
if jsCard.kind != JNull:
let name = jsCard{"name"}.getStr let name = jsCard{"name"}.getStr
if "poll" in name: if "poll" in name:
if "image" in name: if "image" in name:
result.media.addMedia(Photo( result.photos.add jsCard{"binding_values", "image_large"}.getImageVal
url: jsCard{"binding_values", "image_large"}.getImageVal
))
result.poll = some parsePoll(jsCard) result.poll = some parsePoll(jsCard)
elif name == "amplify": elif name == "amplify":
result.media.addMedia(parsePromoVideo(jsCard{"binding_values"})) result.video = some(parsePromoVideo(jsCard{"binding_values"}))
else: else:
result.card = some parseCard(jsCard, js{"entities", "urls"}) result.card = some parseCard(jsCard, js{"entities", "urls"})
result.expandTweetEntities(js) with jsMedia, js{"extended_entities", "media"}:
parseLegacyMediaEntities(js, result) for m in jsMedia:
case m{"type"}.getStr
of "photo":
result.photos.add m{"media_url_https"}.getImageStr
of "video":
result.video = some(parseVideo(m))
with user, m{"additional_media_info", "source_user"}:
result.attribution = some(parseUser(user))
of "animated_gif":
result.gif = some(parseGif(m))
else: discard
with jsWithheld, js{"withheld_in_countries"}: with jsWithheld, js{"withheld_in_countries"}:
let withheldInCountries: seq[string] = let withheldInCountries: seq[string] =
@@ -469,341 +248,159 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull();
result.text.removeSuffix(" Learn more.") result.text.removeSuffix(" Learn more.")
result.available = false result.available = false
proc parseGraphTweet(js: JsonNode): Tweet = proc finalizeTweet(global: GlobalObjects; id: string): Tweet =
if js.kind == JNull: let intId = if id.len > 0: parseBiggestInt(id) else: 0
return Tweet() result = global.tweets.getOrDefault(id, Tweet(id: intId))
case js.getTypeName: if result.quote.isSome:
of "TweetUnavailable": let quote = get(result.quote).id
return Tweet() if $quote in global.tweets:
of "TweetTombstone": result.quote = some global.tweets[$quote]
with text, select(js{"tombstone", "richText"}, js{"tombstone", "text"}):
return Tweet(text: text.getTombstone)
return Tweet()
of "TweetPreviewDisplay":
return Tweet(text: "You're unable to view this Tweet because it's only available to the Subscribers of the account owner.")
of "TweetWithVisibilityResults":
return parseGraphTweet(js{"tweet"})
else:
discard
if "legacy" notin js and "rest_id" notin js:
return Tweet()
var jsCard = select(js{"card"}, js{"tweet_card"}, js{"legacy", "tweet_card"})
if jsCard.kind != JNull:
let legacyCard = jsCard{"legacy"}
if legacyCard.kind != JNull:
let bindingArray = legacyCard{"binding_values"}
if bindingArray.kind == JArray:
var bindingObj: seq[(string, JsonNode)]
for item in bindingArray:
bindingObj.add((item{"key"}.getStr, item{"value"}))
# Create a new card object with flattened structure
jsCard = %*{
"name": legacyCard{"name"},
"url": legacyCard{"url"},
"binding_values": %bindingObj
}
var replyId = 0
with restId, js{"reply_to_results", "rest_id"}:
replyId = restId.getId
if "details" in js:
result = Tweet(
id: js{"rest_id"}.getId,
available: true,
text: js{"details", "full_text"}.getStr,
time: js{"details", "created_at_ms"}.getTimeFromMs,
replyId: js{"reply_to_results", "rest_id"}.getId,
isAd: js{"content_disclosure", "advertising_disclosure", "is_paid_promotion"}.getBool,
isAI: js{"content_disclosure", "ai_generated_disclosure", "has_ai_generated_media"}.getBool,
stats: TweetStats(
replies: js{"counts", "reply_count"}.getInt,
retweets: js{"counts", "retweet_count"}.getInt,
likes: js{"counts", "favorite_count"}.getInt,
)
)
if jsCard.kind != JNull:
let name = jsCard{"name"}.getStr
if "poll" in name:
if "image" in name:
result.media.addMedia(Photo(
url: jsCard{"binding_values", "image_large"}.getImageVal
))
result.poll = some parsePoll(jsCard)
elif name == "amplify":
result.media.addMedia(parsePromoVideo(jsCard{"binding_values"}))
else:
result.card = some parseCard(jsCard, js{"url_entities"})
result.expandTweetEntitiesV2(js)
else:
result = parseTweet(js{"legacy"}, jsCard, replyId)
result.id = js{"rest_id"}.getId
result.user = parseGraphUser(js{"core"})
if result.reply.len == 0:
with replyTo, js{"reply_to_user_results", "result", "core", "screen_name"}:
result.reply = @[replyTo.getStr]
with count, js{"views", "count"}:
result.stats.views = count.getStr("0").parseInt
with noteTweet, js{"note_tweet", "note_tweet_results", "result"}:
result.expandNoteTweetEntities(noteTweet)
parseMediaEntities(js, result)
with quoted, js{"quoted_status_result", "result"}:
result.quote = some(parseGraphTweet(quoted))
with quoted, js{"quotedPostResults"}:
if "result" in quoted:
result.quote = some(parseGraphTweet(quoted{"result"}))
else: else:
result.quote = some Tweet(id: js{"legacy", "quoted_status_id_str"}.getId) result.quote = some Tweet()
with ids, js{"edit_control", "edit_control_initial", "edit_tweet_ids"}: if result.retweet.isSome:
for id in ids: let rt = get(result.retweet).id
result.history.add parseBiggestInt(id.getStr) if $rt in global.tweets:
result.retweet = some finalizeTweet(global, $rt)
else:
result.retweet = some Tweet()
with birdwatch, js{"birdwatch_pivot"}: proc parsePin(js: JsonNode; global: GlobalObjects): Tweet =
result.note = parseCommunityNote(birdwatch) let pin = js{"pinEntry", "entry", "entryId"}.getStr
if pin.len == 0: return
proc parseGraphThread(js: JsonNode): tuple[thread: Chain; self: bool] = let id = pin.getId
for t in ? js{"content", "items"}: if id notin global.tweets: return
let entryId = t.getEntryId
if "tweet-" in entryId and "promoted" notin entryId:
let tweet = t.getTweetResult("item")
if tweet.notNull:
result.thread.content.add parseGraphTweet(tweet)
let tweetDisplayType = select( global.tweets[id].pinned = true
t{"item", "content", "tweet_display_type"}, return finalizeTweet(global, id)
t{"item", "itemContent", "tweetDisplayType"}
)
if tweetDisplayType.getStr == "SelfThread":
result.self = true
else:
result.thread.content.add Tweet(id: entryId.getId)
elif "cursor-showmore" in entryId:
let cursor = t{"item", "content", "value"}
result.thread.cursor = cursor.getStr
result.thread.hasMore = true
proc parseGraphTweetResult*(js: JsonNode): Tweet = proc parseGlobalObjects(js: JsonNode): GlobalObjects =
with tweet, js{"data", "tweet_result", "result"}: result = GlobalObjects()
result = parseGraphTweet(tweet) let
tweets = ? js{"globalObjects", "tweets"}
users = ? js{"globalObjects", "users"}
proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation = for k, v in users:
result = Conversation(replies: Result[Chain](beginning: true)) result.users[k] = parseUser(v, k)
let instructions = ? select( for k, v in tweets:
js{"data", "timelineResponse", "instructions"}, var tweet = parseTweet(v)
js{"data", "timeline_response", "instructions"}, if tweet.user.id in result.users:
js{"data", "threaded_conversation_with_injections_v2", "instructions"} tweet.user = result.users[tweet.user.id]
) result.tweets[k] = tweet
if instructions.len == 0:
return
for i in instructions: proc parseThread(js: JsonNode; global: GlobalObjects): tuple[thread: Chain, self: bool] =
if i.getTypeName == "TimelineAddEntries": result.thread = Chain()
for e in i{"entries"}:
let entryId = e.getEntryId
if entryId.startsWith("tweet-"):
let tweetResult = getTweetResult(e)
if tweetResult.notNull:
let tweet = parseGraphTweet(tweetResult)
if not tweet.available: let thread = js{"content", "item", "content", "conversationThread"}
tweet.id = entryId.getId with cursor, thread{"showMoreCursor"}:
result.thread.cursor = cursor{"value"}.getStr
result.thread.hasMore = true
if entryId.endsWith(tweetId): for t in thread{"conversationComponents"}:
result.tweet = tweet let content = t{"conversationTweetComponent", "tweet"}
else:
result.before.content.add tweet
elif not entryId.endsWith(tweetId):
result.before.content.add Tweet(id: entryId.getId)
elif entryId.startsWith("conversationthread"):
let (thread, self) = parseGraphThread(e)
if self:
result.after = thread
elif thread.content.len > 0:
result.replies.content.add thread
elif entryId.startsWith("tombstone"):
let
content = select(e{"content", "content"}, e{"content", "itemContent"})
tweet = Tweet(
id: entryId.getId,
available: false,
text: content{"tombstoneInfo", "richText"}.getTombstone
)
if $tweet.id == tweetId: if content{"displayType"}.getStr == "SelfThread":
result.tweet = tweet result.self = true
else:
result.before.content.add tweet
elif entryId.startsWith("cursor-bottom"):
var cursorValue = select(
e{"content", "value"},
e{"content", "content", "value"},
e{"content", "itemContent", "value"}
)
result.replies.bottom = cursorValue.getStr
proc parseGraphEditHistory*(js: JsonNode; tweetId: string): EditHistory = var tweet = finalizeTweet(global, content{"id"}.getStr)
let instructions = ? js{
"data", "tweet_result_by_rest_id", "result",
"edit_history_timeline", "timeline", "instructions"
}
if instructions.len == 0:
return
for i in instructions:
if i.getTypeName == "TimelineAddEntries":
for e in i{"entries"}:
let entryId = e.getEntryId
if entryId == "latestTweet":
with item, e{"content", "items"}[0]:
let tweetResult = item.getTweetResult("item")
if tweetResult.notNull:
result.latest = parseGraphTweet(tweetResult)
elif entryId == "staleTweets":
for item in e{"content", "items"}:
let tweetResult = item.getTweetResult("item")
if tweetResult.notNull:
result.history.add parseGraphTweet(tweetResult)
proc extractTweetsFromEntry*(e: JsonNode): seq[Tweet] =
with tweetResult, getTweetResult(e):
var tweet = parseGraphTweet(tweetResult)
if not tweet.available: if not tweet.available:
tweet.id = e.getEntryId.getId tweet.tombstone = getTombstone(content{"tombstone"})
result.add tweet result.thread.content.add tweet
return
for item in e{"content", "items"}: proc parseConversation*(js: JsonNode; tweetId: string): Conversation =
with tweetResult, item.getTweetResult("item"): result = Conversation(replies: Result[Chain](beginning: true))
var tweet = parseGraphTweet(tweetResult) let global = parseGlobalObjects(? js)
if not tweet.available:
tweet.id = item.getEntryId.getId
result.add tweet
proc parseGraphTimeline*(js: JsonNode; after=""): Profile = let instructions = ? js{"timeline", "instructions"}
result = Profile(tweets: Timeline(beginning: after.len == 0))
let instructions = ? select(
js{"data", "list", "timeline_response", "timeline", "instructions"},
js{"data", "user", "result", "timeline", "timeline", "instructions"},
js{"data", "user_result", "result", "timeline_response", "timeline", "instructions"}
)
if instructions.len == 0: if instructions.len == 0:
return return
for i in instructions: for e in instructions[0]{"addEntries", "entries"}:
if i{"moduleItems"}.notNull: let entry = e{"entryId"}.getStr
for item in i{"moduleItems"}: if "tweet" in entry or "tombstone" in entry:
with tweetResult, item.getTweetResult("item"): let tweet = finalizeTweet(global, e.getEntryId)
let tweet = parseGraphTweet(tweetResult) if $tweet.id != tweetId:
if not tweet.available: result.before.content.add tweet
tweet.id = item.getEntryId.getId else:
result.tweets.content.add tweet result.tweet = tweet
continue elif "conversationThread" in entry:
let (thread, self) = parseThread(e, global)
if thread.content.len > 0:
if self:
result.after = thread
else:
result.replies.content.add thread
elif "cursor-showMore" in entry:
result.replies.bottom = e.getCursor
elif "cursor-bottom" in entry:
result.replies.bottom = e.getCursor
if i{"entries"}.notNull: proc parseStatus*(js: JsonNode): Tweet =
for e in i{"entries"}: with e, js{"errors"}:
let entryId = e.getEntryId if e.getError == tweetNotFound:
if entryId.startsWith("tweet") or entryId.startsWith("profile-grid"): return
for tweet in extractTweetsFromEntry(e):
result.tweets.content.add tweet
elif "-conversation-" in entryId or entryId.startsWith("homeConversation"):
let (thread, self) = parseGraphThread(e)
result.tweets.content.add thread.content
elif entryId.startsWith("cursor-bottom"):
result.tweets.bottom = e{"content", "value"}.getStr
if after.len == 0: result = parseTweet(js)
if i.getTypeName == "TimelinePinEntry": if not result.isNil:
let tweets = extractTweetsFromEntry(i{"entry"}) result.user = parseUser(js{"user"})
if tweets.len > 0:
var tweet = tweets[0]
tweet.pinned = true
result.pinned = some tweet
proc parseGraphPhotoRail*(js: JsonNode): PhotoRail = with quote, js{"quoted_status"}:
result = @[] result.quote = some parseStatus(js{"quoted_status"})
let instructions = select( proc parseInstructions[T](res: var Result[T]; global: GlobalObjects; js: JsonNode) =
js{"data", "user", "result", "timeline", "timeline", "instructions"}, if js.kind != JArray or js.len == 0:
js{"data", "user_result", "result", "timeline_response", "timeline", "instructions"}
)
if instructions.len == 0:
return return
for i in instructions: for i in js:
if i{"moduleItems"}.notNull: when T is Tweet:
for item in i{"moduleItems"}: if res.beginning and i{"pinEntry"}.notNull:
with tweetResult, item.getTweetResult("item"): with pin, parsePin(i, global):
let t = parseGraphTweet(tweetResult) res.content.add pin
if not t.available:
t.id = item.getEntryId.getId
let photo = extractGalleryPhoto(t) with r, i{"replaceEntry", "entry"}:
if photo.url.len > 0: if "top" in r{"entryId"}.getStr:
result.add photo res.top = r.getCursor
elif "bottom" in r{"entryId"}.getStr:
res.bottom = r.getCursor
if result.len == 16: proc parseTimeline*(js: JsonNode; after=""): Timeline =
return result = Timeline(beginning: after.len == 0)
continue let global = parseGlobalObjects(? js)
if i.getTypeName != "TimelineAddEntries": let instructions = ? js{"timeline", "instructions"}
continue if instructions.len == 0: return
for e in i{"entries"}: result.parseInstructions(global, instructions)
let entryId = e.getEntryId
if entryId.startsWith("tweet") or entryId.startsWith("profile-grid"):
for t in extractTweetsFromEntry(e):
let photo = extractGalleryPhoto(t)
if photo.url.len > 0:
result.add photo
if result.len == 16: for e in instructions[0]{"addEntries", "entries"}:
return let entry = e{"entryId"}.getStr
if "tweet" in entry or entry.startsWith("sq-I-t") or "tombstone" in entry:
let tweet = finalizeTweet(global, e.getEntryId)
if not tweet.available: continue
result.content.add tweet
elif "cursor-top" in entry:
result.top = e.getCursor
elif "cursor-bottom" in entry:
result.bottom = e.getCursor
elif entry.startsWith("sq-C"):
with cursor, e{"content", "operation", "cursor"}:
if cursor{"cursorType"}.getStr == "Bottom":
result.bottom = cursor{"value"}.getStr
else:
result.top = cursor{"value"}.getStr
proc parseGraphSearch*[T: User | Tweets](js: JsonNode; after=""): Result[T] = proc parsePhotoRail*(js: JsonNode): PhotoRail =
result = Result[T](beginning: after.len == 0) for tweet in js:
let
t = parseTweet(tweet)
url = if t.photos.len > 0: t.photos[0]
elif t.video.isSome: get(t.video).thumb
elif t.gif.isSome: get(t.gif).thumb
elif t.card.isSome: get(t.card).image
else: ""
let instructions = select( if url.len == 0: continue
js{"data", "search", "timeline_response", "timeline", "instructions"}, result.add GalleryPhoto(url: url, tweetId: $t.id)
js{"data", "search_by_raw_query", "search_timeline", "timeline", "instructions"}
)
if instructions.len == 0:
return
for instruction in instructions:
let typ = getTypeName(instruction)
if typ == "TimelineAddEntries":
for e in instruction{"entries"}:
let entryId = e.getEntryId
when T is Tweets:
if entryId.startsWith("tweet"):
with tweetRes, getTweetResult(e):
let tweet = parseGraphTweet(tweetRes)
if not tweet.available:
tweet.id = entryId.getId
result.content.add tweet
elif T is User:
if entryId.startsWith("user"):
with userRes, e{"content", "itemContent"}:
result.content.add parseGraphUser(userRes)
if entryId.startsWith("cursor-bottom"):
result.bottom = e{"content", "value"}.getStr
elif typ == "TimelineReplaceEntry":
if instruction{"entry_id_to_replace"}.getStr.startsWith("cursor-bottom"):
result.bottom = instruction{"entry", "content", "value"}.getStr

View File

@@ -1,23 +1,15 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import std/[times, macros, htmlgen, options, algorithm, re] import std/[strutils, times, macros, htmlgen, options, algorithm, re]
import std/strutils except escape
import std/unicode except strip import std/unicode except strip
from xmltree import escape
import packedjson import packedjson
import types, utils, formatters import types, utils, formatters
const
unicodeOpen = "\uFFFA"
unicodeClose = "\uFFFB"
xmlOpen = escape("<")
xmlClose = escape(">")
let let
unRegex = re"(^|[^A-z0-9-_./?])@([A-z0-9_]{1,15})" unRegex = re"(^|[^A-z0-9-_./?])@([A-z0-9_]{1,15})"
unReplace = "$1<a href=\"/$2\">@$2</a>" unReplace = "$1<a href=\"/$2\">@$2</a>"
htRegex = re"(^|[^\w-_./?])([#$]|)([\w_]+)" htRegex = re"(^|[^\w-_./?])([#$]|)([\w_]+)"
htReplace = "$1<a href=\"/search?f=tweets&q=%23$3\">$2$3</a>" htReplace = "$1<a href=\"/search?q=%23$3\">$2$3</a>"
type type
ReplaceSliceKind = enum ReplaceSliceKind = enum
@@ -36,19 +28,13 @@ template `?`*(js: JsonNode): untyped =
if j.isNull: return if j.isNull: return
j j
template select*(a, b: JsonNode): untyped = template `with`*(ident, value, body): untyped =
if a.notNull: a else: b block:
template select*(a, b, c: JsonNode): untyped =
if a.notNull: a elif b.notNull: b else: c
template with*(ident, value, body): untyped =
if true:
let ident {.inject.} = value let ident {.inject.} = value
if ident != nil: body if ident != nil: body
template with*(ident; value: JsonNode; body): untyped = template `with`*(ident; value: JsonNode; body): untyped =
if true: block:
let ident {.inject.} = value let ident {.inject.} = value
if value.notNull: body if value.notNull: body
@@ -59,19 +45,6 @@ template getError*(js: JsonNode): Error =
if js.kind != JArray or js.len == 0: null if js.kind != JArray or js.len == 0: null
else: Error(js[0]{"code"}.getInt) else: Error(js[0]{"code"}.getInt)
proc getTweetResult*(js: JsonNode; root="content"): JsonNode =
select(
js{root, "content", "tweet_results", "result"},
js{root, "itemContent", "tweet_results", "result"},
js{root, "content", "tweetResult", "result"}
)
template getTypeName*(js: JsonNode): string =
js{"__typename"}.getStr(js{"type"}.getStr)
template getEntryId*(e: JsonNode): string =
e{"entryId"}.getStr(e{"entry_id"}.getStr)
template parseTime(time: string; f: static string; flen: int): DateTime = template parseTime(time: string; f: static string; flen: int): DateTime =
if time.len != flen: return if time.len != flen: return
parse(time, f, utc()) parse(time, f, utc())
@@ -82,32 +55,29 @@ proc getDateTime*(js: JsonNode): DateTime =
proc getTime*(js: JsonNode): DateTime = proc getTime*(js: JsonNode): DateTime =
parseTime(js.getStr, "ddd MMM dd hh:mm:ss \'+0000\' yyyy", 30) parseTime(js.getStr, "ddd MMM dd hh:mm:ss \'+0000\' yyyy", 30)
proc getTimeFromMs*(js: JsonNode): DateTime = proc getId*(id: string): string {.inline.} =
let ms = js.getInt(0)
if ms == 0: return
let seconds = ms div 1000
return fromUnix(seconds).utc()
proc getTimeFromMsStr*(js: JsonNode): DateTime =
var ms: int64
try: ms = parseBiggestInt(js.getStr("0"))
except ValueError: return
if ms == 0: return
let seconds = ms div 1000
return fromUnix(seconds).utc()
proc getId*(id: string): int64 {.inline.} =
let start = id.rfind("-") let start = id.rfind("-")
if start < 0: if start < 0: return id
return parseBiggestInt(id) id[start + 1 ..< id.len]
return parseBiggestInt(id[start + 1 ..< id.len])
proc getId*(js: JsonNode): int64 {.inline.} = proc getId*(js: JsonNode): int64 {.inline.} =
case js.kind case js.kind
of JString: return js.getStr("0").getId of JString: return parseBiggestInt(js.getStr("0"))
of JInt: return js.getBiggestInt() of JInt: return js.getBiggestInt()
else: return 0 else: return 0
proc getEntryId*(js: JsonNode): string {.inline.} =
let entry = js{"entryId"}.getStr
if entry.len == 0: return
if "tweet" in entry or "sq-I-t" in entry:
return entry.getId
elif "tombstone" in entry:
return js{"content", "item", "content", "tombstone", "tweet", "id"}.getStr
else:
echo "unknown entry: ", entry
return
template getStrVal*(js: JsonNode; default=""): string = template getStrVal*(js: JsonNode; default=""): string =
js{"string_value"}.getStr(default) js{"string_value"}.getStr(default)
@@ -119,9 +89,6 @@ proc getImageStr*(js: JsonNode): string =
template getImageVal*(js: JsonNode): string = template getImageVal*(js: JsonNode): string =
js{"image_value", "url"}.getImageStr js{"image_value", "url"}.getImageStr
template getExpandedUrl*(js: JsonNode; fallback=""): string =
js{"expanded_url"}.getStr(js{"url"}.getStr(fallback))
proc getCardUrl*(js: JsonNode; kind: CardKind): string = proc getCardUrl*(js: JsonNode; kind: CardKind): string =
result = js{"website_url"}.getStrVal result = js{"website_url"}.getStrVal
if kind == promoVideoConvo: if kind == promoVideoConvo:
@@ -163,31 +130,16 @@ proc getBanner*(js: JsonNode): string =
return return
proc getTombstone*(js: JsonNode): string = proc getTombstone*(js: JsonNode): string =
result = js{"text"}.getStr result = js{"tombstoneInfo", "richText", "text"}.getStr
result.removeSuffix(" Learn more") result.removeSuffix(" Learn more")
proc getMp4Resolution*(url: string): int =
# parses the height out of a URL like this one:
# https://video.twimg.com/ext_tw_video/<tweet-id>/pu/vid/720x1280/<random>.mp4
const vidSep = "/vid/"
let
vidIdx = url.find(vidSep) + vidSep.len
resIdx = url.find('x', vidIdx) + 1
res = url[resIdx ..< url.find("/", resIdx)]
try:
return parseInt(res)
except ValueError:
# cannot determine resolution (e.g. m3u8/non-mp4 video)
return 0
proc extractSlice(js: JsonNode): Slice[int] = proc extractSlice(js: JsonNode): Slice[int] =
result = js["indices"][0].getInt ..< js["indices"][1].getInt result = js["indices"][0].getInt ..< js["indices"][1].getInt
proc extractUrls(result: var seq[ReplaceSlice]; js: JsonNode; proc extractUrls(result: var seq[ReplaceSlice]; js: JsonNode;
textLen: int; hideTwitter = false) = textLen: int; hideTwitter = false) =
let let
url = js.getExpandedUrl url = js["expanded_url"].getStr
slice = js.extractSlice slice = js.extractSlice
if hideTwitter and slice.b.succ >= textLen and url.isTwitterUrl: if hideTwitter and slice.b.succ >= textLen and url.isTwitterUrl:
@@ -214,7 +166,7 @@ proc replacedWith(runes: seq[Rune]; repls: openArray[ReplaceSlice];
let let
name = $runes[rep.slice.a.succ .. rep.slice.b] name = $runes[rep.slice.a.succ .. rep.slice.b]
symbol = $runes[rep.slice.a] symbol = $runes[rep.slice.a]
result.add a(symbol & name, href = "/search?f=tweets&q=%23" & name) result.add a(symbol & name, href = "/search?q=%23" & name)
of rkMention: of rkMention:
result.add a($runes[rep.slice], href = rep.url, title = rep.display) result.add a($runes[rep.slice], href = rep.url, title = rep.display)
of rkUrl: of rkUrl:
@@ -248,7 +200,7 @@ proc expandUserEntities*(user: var User; js: JsonNode) =
ent = ? js{"entities"} ent = ? js{"entities"}
with urls, ent{"url", "urls"}: with urls, ent{"url", "urls"}:
user.website = urls[0].getExpandedUrl user.website = urls[0]{"expanded_url"}.getStr
var replacements = newSeq[ReplaceSlice]() var replacements = newSeq[ReplaceSlice]()
@@ -263,37 +215,47 @@ proc expandUserEntities*(user: var User; js: JsonNode) =
user.bio = user.bio.replacef(unRegex, unReplace) user.bio = user.bio.replacef(unRegex, unReplace)
.replacef(htRegex, htReplace) .replacef(htRegex, htReplace)
proc expandTextEntities(tweet: Tweet; entities: JsonNode; text: string; textSlice: Slice[int]; proc expandTweetEntities*(tweet: Tweet; js: JsonNode) =
replyTo=""; hasRedundantLink=false) = let
let hasCard = tweet.card.isSome orig = tweet.text.toRunes
textRange = js{"display_text_range"}
textSlice = textRange{0}.getInt .. textRange{1}.getInt
hasQuote = js{"is_quote_status"}.getBool
hasCard = tweet.card.isSome
var replyTo = ""
if tweet.replyId != 0:
with reply, js{"in_reply_to_screen_name"}:
tweet.reply.add reply.getStr
replyTo = reply.getStr
let ent = ? js{"entities"}
var replacements = newSeq[ReplaceSlice]() var replacements = newSeq[ReplaceSlice]()
with urls, entities{"urls"}: with urls, ent{"urls"}:
for u in urls: for u in urls:
let urlStr = u["url"].getStr let urlStr = u["url"].getStr
if urlStr.len == 0 or urlStr notin text: if urlStr.len == 0 or urlStr notin tweet.text:
continue continue
replacements.extractUrls(u, textSlice.b, hideTwitter = hasQuote)
replacements.extractUrls(u, textSlice.b, hideTwitter = hasRedundantLink)
if hasCard and u{"url"}.getStr == get(tweet.card).url: if hasCard and u{"url"}.getStr == get(tweet.card).url:
get(tweet.card).url = u.getExpandedUrl get(tweet.card).url = u{"expanded_url"}.getStr
with media, entities{"media"}: with media, ent{"media"}:
for m in media: for m in media:
replacements.extractUrls(m, textSlice.b, hideTwitter = true) replacements.extractUrls(m, textSlice.b, hideTwitter = true)
if "hashtags" in entities: if "hashtags" in ent:
for hashtag in entities["hashtags"]: for hashtag in ent["hashtags"]:
replacements.extractHashtags(hashtag) replacements.extractHashtags(hashtag)
if "symbols" in entities: if "symbols" in ent:
for symbol in entities["symbols"]: for symbol in ent["symbols"]:
replacements.extractHashtags(symbol) replacements.extractHashtags(symbol)
if "user_mentions" in entities: if "user_mentions" in ent:
for mention in entities["user_mentions"]: for mention in ent["user_mentions"]:
let let
name = mention{"screen_name"}.getStr name = mention{"screen_name"}.getStr
slice = mention.extractSlice slice = mention.extractSlice
@@ -310,110 +272,5 @@ proc expandTextEntities(tweet: Tweet; entities: JsonNode; text: string; textSlic
replacements.deduplicate replacements.deduplicate
replacements.sort(cmp) replacements.sort(cmp)
tweet.text = text.toRunes.replacedWith(replacements, textSlice).strip(leading=false) tweet.text = orig.replacedWith(replacements, textSlice)
.strip(leading=false)
proc expandTweetEntities*(tweet: Tweet; js: JsonNode) =
let
entities = ? js{"entities"}
textRange = js{"display_text_range"}
textSlice = textRange{0}.getInt .. textRange{1}.getInt
hasQuote = js{"is_quote_status"}.getBool
hasJobCard = tweet.card.isSome and get(tweet.card).kind == jobDetails
var replyTo = ""
if tweet.replyId != 0:
with reply, js{"in_reply_to_screen_name"}:
replyTo = reply.getStr
tweet.reply.add replyTo
tweet.expandTextEntities(entities, tweet.text, textSlice, replyTo, hasQuote or hasJobCard)
proc expandTextEntitiesV2(tweet: Tweet; js: JsonNode; text: string; textSlice: Slice[int];
hasRedundantLink=false) =
let hasCard = tweet.card.isSome
var replacements = newSeq[ReplaceSlice]()
with urls, js{"url_entities"}:
for u in urls:
let urlStr = u["url"].getStr
if urlStr.len == 0 or urlStr notin text:
continue
replacements.extractUrls(u, textSlice.b, hideTwitter = hasRedundantLink)
if hasCard and u{"url"}.getStr == get(tweet.card).url:
get(tweet.card).url = u.getExpandedUrl
with hashtags, js{"details", "hashtag_entities"}:
for hashtag in hashtags:
replacements.extractHashtags(hashtag)
with cashtags, js{"details", "cashtag_entities"}:
for cashtag in cashtags:
replacements.extractHashtags(cashtag)
with mentions, js{"mention_entities"}:
for mention in mentions:
let
name = mention{"screen_name"}.getStr
slice = mention.extractSlice
idx = tweet.reply.find(name)
if slice.a >= textSlice.a:
replacements.add ReplaceSlice(kind: rkMention, slice: slice,
url: "/" & name, display: mention["name"].getStr)
elif idx == -1 and tweet.replyId != 0:
tweet.reply.add name
replacements.deduplicate
replacements.sort(cmp)
tweet.text = text.toRunes.replacedWith(replacements, textSlice).strip(leading=false)
proc expandTweetEntitiesV2*(tweet: Tweet; js: JsonNode) =
let
textRange = js{"details", "display_text_range"}
textSlice = textRange{0}.getInt .. textRange{1}.getInt
hasQuote = "quoted_tweet_results" in js
hasJobCard = tweet.card.isSome and get(tweet.card).kind == jobDetails
tweet.expandTextEntitiesV2(js, tweet.text, textSlice, hasQuote or hasJobCard)
proc expandNoteTweetEntities*(tweet: Tweet; js: JsonNode) =
let
entities = ? js{"entity_set"}
text = js{"text"}.getStr.multiReplace(("<", unicodeOpen), (">", unicodeClose))
textSlice = 0..text.runeLen
tweet.expandTextEntities(entities, text, textSlice)
tweet.text = tweet.text.multiReplace((unicodeOpen, xmlOpen), (unicodeClose, xmlClose))
proc expandBirdwatchEntities*(text: string; entities: JsonNode): string =
let runes = text.toRunes
var replacements: seq[ReplaceSlice]
for entity in entities:
let
fromIdx = entity{"from_index"}.getInt
toIdx = entity{"to_index"}.getInt
url = entity{"ref", "url"}.getStr
if url.len > 0:
replacements.add ReplaceSlice(
kind: rkUrl,
slice: fromIdx ..< toIdx,
url: url,
display: $runes[fromIdx ..< min(toIdx, runes.len)]
)
replacements.sort(cmp)
result = runes.replacedWith(replacements, 0 ..< runes.len)
proc extractGalleryPhoto*(t: Tweet): GalleryPhoto =
let url =
if t.media.len > 0: t.media[0].getThumb
elif t.card.isSome: get(t.card).image
else: ""
result = GalleryPhoto(url: url, tweetId: $t.id)

View File

@@ -1,22 +1,22 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import tables, strutils import tables
import types, prefs_impl import types, prefs_impl
from config import get from config import get
from parsecfg import nil from parsecfg import nil
export genUpdatePrefs, genResetPrefs, genApplyPrefs export genUpdatePrefs, genResetPrefs
var defaultPrefs*: Prefs var defaultPrefs*: Prefs
proc updateDefaultPrefs*(cfg: parsecfg.Config) = proc updateDefaultPrefs*(cfg: parsecfg.Config) =
genDefaultPrefs() genDefaultPrefs()
proc getPrefs*(cookies, params: Table[string, string]): Prefs = proc getPrefs*(cookies: Table[string, string]): Prefs =
result = defaultPrefs result = defaultPrefs
genParsePrefs(cookies) genCookiePrefs(cookies)
genParsePrefs(params)
proc encodePrefs*(prefs: Prefs): string = template getPref*(cookies: Table[string, string], pref): untyped =
var encPairs: seq[string] bind genCookiePref
genEncodePrefs(prefs) var res = defaultPrefs.`pref`
encPairs.join(",") genCookiePref(cookies, pref, res)
res

View File

@@ -60,9 +60,6 @@ genPrefs:
stickyProfile(checkbox, true): stickyProfile(checkbox, true):
"Make profile sidebar stick to top" "Make profile sidebar stick to top"
stickyNav(checkbox, true):
"Keep navbar fixed to top"
bidiSupport(checkbox, false): bidiSupport(checkbox, false):
"Support bidirectional text (makes clicking on tweets harder)" "Support bidirectional text (makes clicking on tweets harder)"
@@ -78,9 +75,6 @@ genPrefs:
hideReplies(checkbox, false): hideReplies(checkbox, false):
"Hide tweet replies" "Hide tweet replies"
hideCommunityNotes(checkbox, false):
"Hide community notes"
squareAvatars(checkbox, false): squareAvatars(checkbox, false):
"Square profile pictures" "Square profile pictures"
@@ -89,7 +83,7 @@ genPrefs:
"Enable mp4 video playback (only for gifs)" "Enable mp4 video playback (only for gifs)"
hlsPlayback(checkbox, false): hlsPlayback(checkbox, false):
"Enable HLS video streaming (requires JavaScript)" "Enable hls video streaming (requires JavaScript)"
proxyVideos(checkbox, true): proxyVideos(checkbox, true):
"Proxy video streaming through the server (might be slow)" "Proxy video streaming through the server (might be slow)"
@@ -100,17 +94,6 @@ genPrefs:
autoplayGifs(checkbox, true): autoplayGifs(checkbox, true):
"Autoplay gifs" "Autoplay gifs"
compactGallery(checkbox, false):
"Compact media gallery (no profile info or text)"
gallerySize(select, "Medium"):
"Gallery column size"
options: @["Small", "Medium", "Large"]
mediaView(select, "Timeline"):
"Default media view"
options: @["Timeline", "Grid", "Gallery"]
"Link replacements (blank to disable)": "Link replacements (blank to disable)":
replaceTwitter(input, ""): replaceTwitter(input, ""):
"Twitter -> Nitter" "Twitter -> Nitter"
@@ -124,6 +107,10 @@ genPrefs:
"Reddit -> Teddit/Libreddit" "Reddit -> Teddit/Libreddit"
placeholder: "Teddit hostname" placeholder: "Teddit hostname"
replaceInstagram(input, ""):
"Instagram -> Bibliogram"
placeholder: "Bibliogram hostname"
iterator allPrefs*(): Pref = iterator allPrefs*(): Pref =
for k, v in prefList: for k, v in prefList:
for pref in v: for pref in v:
@@ -144,7 +131,7 @@ macro genDefaultPrefs*(): untyped =
result.add quote do: result.add quote do:
defaultPrefs.`ident` = cfg.get("Preferences", `name`, `default`) defaultPrefs.`ident` = cfg.get("Preferences", `name`, `default`)
macro genParsePrefs*(prefs): untyped = macro genCookiePrefs*(cookies): untyped =
result = nnkStmtList.newTree() result = nnkStmtList.newTree()
for pref in allPrefs(): for pref in allPrefs():
let let
@@ -154,17 +141,37 @@ macro genParsePrefs*(prefs): untyped =
options = pref.options options = pref.options
result.add quote do: result.add quote do:
if `name` in `prefs`: if `name` in `cookies`:
when `kind` == input or `name` == "theme": when `kind` == input or `name` == "theme":
result.`ident` = `prefs`[`name`] result.`ident` = `cookies`[`name`]
elif `kind` == checkbox: elif `kind` == checkbox:
result.`ident` = `prefs`[`name`] == "on" or result.`ident` = `cookies`[`name`] == "on"
`prefs`[`name`] == "true" or
`prefs`[`name`] == "1"
else: else:
let value = `prefs`[`name`] let value = `cookies`[`name`]
if value in `options`: result.`ident` = value if value in `options`: result.`ident` = value
macro genCookiePref*(cookies, prefName, res): untyped =
result = nnkStmtList.newTree()
for pref in allPrefs():
let ident = ident(pref.name)
if ident != prefName:
continue
let
name = pref.name
kind = newLit(pref.kind)
options = pref.options
result.add quote do:
if `name` in `cookies`:
when `kind` == input or `name` == "theme":
`res` = `cookies`[`name`]
elif `kind` == checkbox:
`res` = `cookies`[`name`] == "on"
else:
let value = `cookies`[`name`]
if value in `options`: `res` = value
macro genUpdatePrefs*(): untyped = macro genUpdatePrefs*(): untyped =
result = nnkStmtList.newTree() result = nnkStmtList.newTree()
let req = ident("request") let req = ident("request")
@@ -199,36 +206,6 @@ macro genResetPrefs*(): untyped =
result.add quote do: result.add quote do:
savePref(`name`, "", `req`, expire=true) savePref(`name`, "", `req`, expire=true)
macro genEncodePrefs*(prefs): untyped =
result = nnkStmtList.newTree()
for pref in allPrefs():
let
name = newLit(pref.name)
ident = ident(pref.name)
kind = newLit(pref.kind)
defaultIdent = nnkDotExpr.newTree(ident("defaultPrefs"), ident(pref.name))
result.add quote do:
when `kind` == checkbox:
if `prefs`.`ident` != `defaultIdent`:
if `prefs`.`ident`:
encPairs.add `name` & "=on"
else:
encPairs.add `name` & "="
else:
if `prefs`.`ident` != `defaultIdent`:
encPairs.add `name` & "=" & `prefs`.`ident`
macro genApplyPrefs*(params, req): untyped =
result = nnkStmtList.newTree()
for pref in allPrefs():
let name = newLit(pref.name)
result.add quote do:
if `name` in `params`:
savePref(`name`, `params`[`name`], `req`)
else:
savePref(`name`, "", `req`, expire=true)
macro genPrefsType*(): untyped = macro genPrefsType*(): untyped =
let name = nnkPostfix.newTree(ident("*"), ident("Prefs")) let name = nnkPostfix.newTree(ident("*"), ident("Prefs"))
result = quote do: result = quote do:

View File

@@ -1,14 +1,15 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import strutils, strformat, sequtils, tables, uri import strutils, strformat, sequtils, tables, uri
import types, utils import types
const const
validFilters* = @[ validFilters* = @[
"media", "images", "twimg", "videos", "media", "images", "twimg", "videos",
"native_video", "consumer_video", "spaces", "native_video", "consumer_video", "pro_video",
"links", "news", "quote", "mentions", "links", "news", "quote", "mentions",
"replies", "retweets", "nativeretweets" "replies", "retweets", "nativeretweets",
"verified", "safe"
] ]
emptyQuery* = "include:nativeretweets" emptyQuery* = "include:nativeretweets"
@@ -20,13 +21,12 @@ template `@`(param: string): untyped =
proc initQuery*(pms: Table[string, string]; name=""): Query = proc initQuery*(pms: Table[string, string]; name=""): Query =
result = Query( result = Query(
kind: parseEnum[QueryKind](@"f", tweets), kind: parseEnum[QueryKind](@"f", tweets),
view: @"view",
text: @"q", text: @"q",
filters: validFilters.filterIt("f-" & it in pms), filters: validFilters.filterIt("f-" & it in pms),
excludes: validFilters.filterIt("e-" & it in pms), excludes: validFilters.filterIt("e-" & it in pms),
since: @"since", since: @"since",
until: @"until", until: @"until",
minLikes: validateNumber(@"min_faves") near: @"near"
) )
if name.len > 0: if name.len > 0:
@@ -46,7 +46,7 @@ proc getReplyQuery*(name: string): Query =
fromUser: @[name] fromUser: @[name]
) )
proc genQueryParam*(query: Query; maxId=""): string = proc genQueryParam*(query: Query): string =
var var
filters: seq[string] filters: seq[string]
param: string param: string
@@ -55,20 +55,15 @@ proc genQueryParam*(query: Query; maxId=""): string =
return query.text return query.text
for i, user in query.fromUser: for i, user in query.fromUser:
if i == 0: param &= &"from:{user} "
param = "("
param &= &"from:{user}"
if i < query.fromUser.high: if i < query.fromUser.high:
param &= " OR " param &= "OR "
else:
param &= ")"
if query.fromUser.len > 0 and query.kind in {posts, media}: if query.fromUser.len > 0 and query.kind in {posts, media}:
param &= " (filter:self_threads OR -filter:replies)" param &= "filter:self_threads OR-filter:replies "
if "nativeretweets" notin query.excludes: if "nativeretweets" notin query.excludes:
param &= " include:nativeretweets" param &= "include:nativeretweets "
for f in query.filters: for f in query.filters:
filters.add "filter:" & f filters.add "filter:" & f
@@ -78,49 +73,38 @@ proc genQueryParam*(query: Query; maxId=""): string =
for i in query.includes: for i in query.includes:
filters.add "include:" & i filters.add "include:" & i
if filters.len > 0: result = strip(param & filters.join(&" {query.sep} "))
result = strip(param & " (" & filters.join(&" {query.sep} ") & ")")
else:
result = strip(param)
if query.since.len > 0: if query.since.len > 0:
result &= " since:" & query.since result &= " since:" & query.since
if query.until.len > 0 and maxId.len == 0: if query.until.len > 0:
result &= " until:" & query.until result &= " until:" & query.until
if query.minLikes.len > 0: if query.near.len > 0:
result &= " min_faves:" & query.minLikes result &= &" near:\"{query.near}\" within:15mi"
if query.text.len > 0: if query.text.len > 0:
if result.len > 0: if result.len > 0:
result &= " " & query.text result &= " " & query.text
else: else:
result = query.text result = query.text
if result.len > 0 and maxId.len > 0:
result &= " max_id:" & maxId
proc genQueryUrl*(query: Query): string = proc genQueryUrl*(query: Query): string =
var params: seq[string] if query.kind notin {tweets, users}: return
if query.view.len > 0: var params = @[&"f={query.kind}"]
params.add "view=" & encodeUrl(query.view) if query.text.len > 0:
params.add "q=" & encodeUrl(query.text)
for f in query.filters:
params.add "f-" & f & "=on"
for e in query.excludes:
params.add "e-" & e & "=on"
for i in query.includes.filterIt(it != "nativeretweets"):
params.add "i-" & i & "=on"
if query.kind in {tweets, users}: if query.since.len > 0:
params.add &"f={query.kind}" params.add "since=" & query.since
if query.text.len > 0: if query.until.len > 0:
params.add "q=" & encodeUrl(query.text) params.add "until=" & query.until
for f in query.filters: if query.near.len > 0:
params.add &"f-{f}=on" params.add "near=" & query.near
for e in query.excludes:
params.add &"e-{e}=on"
for i in query.includes.filterIt(it != "nativeretweets"):
params.add &"i-{i}=on"
if query.since.len > 0:
params.add "since=" & query.since
if query.until.len > 0:
params.add "until=" & query.until
if query.minLikes.len > 0:
params.add "min_faves=" & query.minLikes
if params.len > 0: if params.len > 0:
result &= params.join("&") result &= params.join("&")

View File

@@ -52,7 +52,6 @@ proc initRedisPool*(cfg: Config) {.async.} =
await migrate("profileDates", "p:*") await migrate("profileDates", "p:*")
await migrate("profileStats", "p:*") await migrate("profileStats", "p:*")
await migrate("userType", "p:*") await migrate("userType", "p:*")
await migrate("verifiedType", "p:*")
pool.withAcquire(r): pool.withAcquire(r):
# optimize memory usage for user ID buckets # optimize memory usage for user ID buckets
@@ -86,7 +85,7 @@ proc cache*(data: List) {.async.} =
await setEx(data.listKey, listCacheTime, compress(toFlatty(data))) await setEx(data.listKey, listCacheTime, compress(toFlatty(data)))
proc cache*(data: PhotoRail; name: string) {.async.} = proc cache*(data: PhotoRail; name: string) {.async.} =
await setEx("pr2:" & toLower(name), baseCacheTime * 2, compress(toFlatty(data))) await setEx("pr:" & toLower(name), baseCacheTime, compress(toFlatty(data)))
proc cache*(data: User) {.async.} = proc cache*(data: User) {.async.} =
if data.username.len == 0: return if data.username.len == 0: return
@@ -119,11 +118,11 @@ proc getUserId*(username: string): Future[string] {.async.} =
pool.withAcquire(r): pool.withAcquire(r):
result = await r.hGet(name.uidKey, name) result = await r.hGet(name.uidKey, name)
if result == redisNil: if result == redisNil:
let user = await getGraphUser(username) let user = await getUser(username)
if user.suspended: if user.suspended:
return "suspended" return "suspended"
else: else:
await all(cacheUserId(name, user.id), cache(user)) await cacheUserId(name, user.id)
return user.id return user.id
proc getCachedUser*(username: string; fetch=true): Future[User] {.async.} = proc getCachedUser*(username: string; fetch=true): Future[User] {.async.} =
@@ -131,7 +130,8 @@ proc getCachedUser*(username: string; fetch=true): Future[User] {.async.} =
if prof != redisNil: if prof != redisNil:
prof.deserialize(User) prof.deserialize(User)
elif fetch: elif fetch:
result = await getGraphUser(username) let userId = await getUserId(username)
result = await getGraphUser(userId)
await cache(result) await cache(result)
proc getCachedUsername*(userId: string): Future[string] {.async.} = proc getCachedUsername*(userId: string): Future[string] {.async.} =
@@ -142,57 +142,28 @@ proc getCachedUsername*(userId: string): Future[string] {.async.} =
if username != redisNil: if username != redisNil:
result = username result = username
else: else:
let user = await getGraphUserById(userId) let user = await getUserById(userId)
result = user.username result = user.username
await setEx(key, baseCacheTime, result) await setEx(key, baseCacheTime, result)
if result.len > 0 and user.id.len > 0:
await all(cacheUserId(result, user.id), cache(user))
# proc getCachedTweet*(id: int64): Future[Tweet] {.async.} = proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
# if id == 0: return if id == 0: return
# let tweet = await get(id.tweetKey) let tweet = await get(id.tweetKey)
# if tweet != redisNil: if tweet != redisNil:
# tweet.deserialize(Tweet) tweet.deserialize(Tweet)
# else:
# result = await getGraphTweetResult($id)
# if not result.isNil:
# await cache(result)
proc cache*(data: Broadcast) {.async.} =
if data.id.len == 0: return
await setEx("bc:" & data.id, baseCacheTime, compress(toFlatty(data)))
proc getCachedBroadcast*(id: string): Future[Broadcast] {.async.} =
if id.len == 0: return
let cached = await get("bc:" & id)
if cached != redisNil:
cached.deserialize(Broadcast)
else: else:
result = await getBroadcastInfo(id) result = await getStatus($id)
await cache(result) if result.isNil:
result.m3u8Url = await fetchBroadcastStream(result.mediaKey) await cache(result)
proc cache*(data: AccountInfo; name: string) {.async.} = proc getCachedPhotoRail*(name: string): Future[PhotoRail] {.async.} =
await setEx("ai:" & toLower(name), baseCacheTime * 24, compress(toFlatty(data))) if name.len == 0: return
let rail = await get("pr:" & toLower(name))
proc getCachedAccountInfo*(username: string; fetch=true): Future[AccountInfo] {.async.} =
if username.len == 0: return
let name = toLower(username)
let cached = await get("ai:" & name)
if cached != redisNil:
cached.deserialize(AccountInfo)
elif fetch:
result = await getAboutAccount(username)
await cache(result, name)
proc getCachedPhotoRail*(id: string): Future[PhotoRail] {.async.} =
if id.len == 0: return
let rail = await get("pr2:" & toLower(id))
if rail != redisNil: if rail != redisNil:
rail.deserialize(PhotoRail) rail.deserialize(PhotoRail)
else: else:
result = await getPhotoRail(id) result = await getPhotoRail(name)
await cache(result, id) await cache(result, name)
proc getCachedList*(username=""; slug=""; id=""): Future[List] {.async.} = proc getCachedList*(username=""; slug=""; id=""): Future[List] {.async.} =
let list = if id.len == 0: redisNil let list = if id.len == 0: redisNil

View File

@@ -1,44 +0,0 @@
# SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, strutils
import jester
import router_utils
import ".."/[types, formatters, redis_cache]
import ../views/[general, broadcast]
import media
export broadcast
proc createBroadcastRouter*(cfg: Config) =
router broadcastRoute:
get "/i/broadcasts/@id":
cond @"id".allCharsInSet({'a'..'z', 'A'..'Z', '0'..'9'})
var bc: Broadcast
try:
bc = await getCachedBroadcast(@"id")
except:
discard
if bc.id.len == 0:
resp Http404, showError("Broadcast not found", cfg)
let prefs = requestPrefs()
resp renderMain(renderBroadcast(bc, prefs, request.path), request, cfg, prefs,
bc.title, ogTitle=bc.title)
get "/i/broadcasts/@id/stream":
cond @"id".allCharsInSet({'a'..'z', 'A'..'Z', '0'..'9'})
var bc: Broadcast
try:
bc = await getCachedBroadcast(@"id")
except:
discard
if bc.m3u8Url.len == 0:
resp Http404
let manifest = await safeFetch(bc.m3u8Url)
if manifest.len == 0:
resp Http502
resp proxifyVideo(manifest, requestPrefs().proxyVideos, bc.m3u8Url), m3u8Mime

View File

@@ -1,13 +1,10 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import jester import jester
import router_utils import router_utils
import ".."/[auth, types] import ".."/[tokens, types]
proc createDebugRouter*(cfg: Config) = proc createDebugRouter*(cfg: Config) =
router debug: router debug:
get "/.health": get "/.tokens":
respJson getSessionPoolHealth()
get "/.sessions":
cond cfg.enableDebug cond cfg.enableDebug
respJson getSessionPoolDebug() respJson getPoolJson()

View File

@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, strutils, strformat, options import asyncdispatch, strutils, options
import jester, karax/vdom import jester, karax/vdom
import ".."/[types, api] import ".."/[types, api]
import ../views/[embed, tweet, general] import ../views/[embed, tweet, general]
@@ -10,27 +10,27 @@ export api, embed, vdom, tweet, general, router_utils
proc createEmbedRouter*(cfg: Config) = proc createEmbedRouter*(cfg: Config) =
router embed: router embed:
get "/i/videos/tweet/@id": get "/i/videos/tweet/@id":
let tweet = await getGraphTweetResult(@"id") let convo = await getTweet(@"id")
if tweet == nil or not tweet.hasVideos: if convo == nil or convo.tweet == nil or convo.tweet.video.isNone:
resp Http404 resp Http404
resp renderVideoEmbed(tweet, cfg, request) resp renderVideoEmbed(convo.tweet, cfg, request)
get "/@user/status/@id/embed": get "/@user/status/@id/embed":
let let
tweet = await getGraphTweetResult(@"id") convo = await getTweet(@"id")
prefs = requestPrefs() prefs = cookiePrefs()
path = getPath() path = getPath()
if tweet == nil: if convo == nil or convo.tweet == nil:
resp Http404 resp Http404
resp renderTweetEmbed(tweet, path, prefs, cfg, request) resp $renderTweetEmbed(convo.tweet, path, prefs, cfg, request)
get "/embed/Tweet.html": get "/embed/Tweet.html":
let id = @"id" let id = @"id"
if id.len > 0: if id.len > 0:
redirect(&"/i/status/{id}/embed") redirect("/i/status/" & id & "/embed")
else: else:
resp Http404 resp Http404

View File

@@ -1,25 +1,23 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import strutils, strformat, uri import strutils, uri
import jester import jester
import router_utils import router_utils
import ".."/[types, redis_cache, api] import ".."/[types, redis_cache, api]
import ../views/[general, timeline, list] import ../views/[general, timeline, list]
export getListTimeline, getGraphList
template respList*(list, timeline, title, vnode: typed) = template respList*(list, timeline, title, vnode: typed) =
if list.id.len == 0 or list.name.len == 0: if list.id.len == 0 or list.name.len == 0:
resp Http404, showError(&"""List "{@"id"}" not found""", cfg) resp Http404, showError("List " & @"id" & " not found", cfg)
let let
html = renderList(vnode, timeline.query, list) html = renderList(vnode, timeline.query, list)
rss = if cfg.enableRSSList: &"""/i/lists/{@"id"}/rss""" else: "" rss = "/i/lists/$1/rss" % [@"id"]
resp renderMain(html, request, cfg, prefs, titleText=title, rss=rss, banner=list.banner) resp renderMain(html, request, cfg, prefs, titleText=title, rss=rss, banner=list.banner)
proc title*(list: List): string =
&"@{list.username}/{list.name}"
proc createListRouter*(cfg: Config) = proc createListRouter*(cfg: Config) =
router list: router list:
get "/@name/lists/@slug/?": get "/@name/lists/@slug/?":
@@ -30,22 +28,24 @@ proc createListRouter*(cfg: Config) =
slug = decodeUrl(@"slug") slug = decodeUrl(@"slug")
list = await getCachedList(@"name", slug) list = await getCachedList(@"name", slug)
if list.id.len == 0: if list.id.len == 0:
resp Http404, showError(&"""List "{@"slug"}" not found""", cfg) resp Http404, showError("List \"" & @"slug" & "\" not found", cfg)
redirect(&"/i/lists/{list.id}") redirect("/i/lists/" & list.id)
get "/i/lists/@id/?": get "/i/lists/@id/?":
cond '.' notin @"id" cond '.' notin @"id"
let let
prefs = requestPrefs() prefs = cookiePrefs()
list = await getCachedList(id=(@"id")) list = await getCachedList(id=(@"id"))
timeline = await getGraphListTweets(list.id, getCursor()) title = "@" & list.username & "/" & list.name
timeline = await getListTimeline(list.id, getCursor())
vnode = renderTimelineTweets(timeline, prefs, request.path) vnode = renderTimelineTweets(timeline, prefs, request.path)
respList(list, timeline, list.title, vnode) respList(list, timeline, title, vnode)
get "/i/lists/@id/members": get "/i/lists/@id/members":
cond '.' notin @"id" cond '.' notin @"id"
let let
prefs = requestPrefs() prefs = cookiePrefs()
list = await getCachedList(id=(@"id")) list = await getCachedList(id=(@"id"))
title = "@" & list.username & "/" & list.name
members = await getGraphListMembers(list, getCursor()) members = await getGraphListMembers(list, getCursor())
respList(list, members, list.title, renderTimelineUsers(members, prefs, request.path)) respList(list, members, title, renderTimelineUsers(members, prefs, request.path))

View File

@@ -37,8 +37,6 @@ proc proxyMedia*(req: jester.Request; url: string): Future[HttpCode] {.async.} =
try: try:
let res = await client.get(url) let res = await client.get(url)
if res.status != "200 OK": if res.status != "200 OK":
if res.status != "404 Not Found":
echo "[media] Proxying failed, status: $1, url: $2" % [res.status, url]
return Http404 return Http404
let hashed = $hash(url) let hashed = $hash(url)
@@ -52,10 +50,10 @@ proc proxyMedia*(req: jester.Request; url: string): Future[HttpCode] {.async.} =
"" ""
let headers = newHttpHeaders({ let headers = newHttpHeaders({
"content-type": res.headers["content-type", 0], "Content-Type": res.headers["content-type", 0],
"content-length": contentLength, "Content-Length": contentLength,
"cache-control": maxAge, "Cache-Control": maxAge,
"etag": hashed "ETag": hashed
}) })
respond(request, headers) respond(request, headers)
@@ -67,7 +65,6 @@ proc proxyMedia*(req: jester.Request; url: string): Future[HttpCode] {.async.} =
await request.client.send(data) await request.client.send(data)
data.setLen 0 data.setLen 0
except HttpRequestError, ProtocolError, OSError: except HttpRequestError, ProtocolError, OSError:
echo "[media] Proxying exception, error: $1, url: $2" % [getCurrentExceptionMsg(), url]
result = Http404 result = Http404
finally: finally:
client.close() client.close()
@@ -86,33 +83,17 @@ proc decoded*(req: jester.Request; index: int): string =
if based: decode(encoded) if based: decode(encoded)
else: decodeUrl(encoded) else: decodeUrl(encoded)
proc normalizeImgUrl*(url: var string) =
if not url.startsWith("http"):
if "twimg.com" notin url:
url.insert(twimg)
url.insert(https)
proc createMediaRouter*(cfg: Config) = proc createMediaRouter*(cfg: Config) =
router media: router media:
get "/pic/?": get "/pic/?":
resp Http404 resp Http404
get re"^\/pic\/orig\/(enc)?\/?(.+)":
var url = decoded(request, 1)
cond "/amplify_video/" notin url
normalizeImgUrl(url)
url.add("?name=orig")
let uri = parseUri(url)
cond isTwitterUrl(uri) == true
let code = await proxyMedia(request, url)
check code
get re"^\/pic\/(enc)?\/?(.+)": get re"^\/pic\/(enc)?\/?(.+)":
var url = decoded(request, 1) var url = decoded(request, 1)
cond "/amplify_video/" notin url if "twimg.com" notin url:
normalizeImgUrl(url) url.insert(twimg)
if not url.startsWith(https):
url.insert(https)
let uri = parseUri(url) let uri = parseUri(url)
cond isTwitterUrl(uri) == true cond isTwitterUrl(uri) == true
@@ -125,7 +106,7 @@ proc createMediaRouter*(cfg: Config) =
cond "http" in url cond "http" in url
if getHmac(url) != request.matches[1]: if getHmac(url) != request.matches[1]:
resp Http403, showError("Failed to verify signature", cfg) resp showError("Failed to verify signature", cfg)
if ".mp4" in url or ".ts" in url or ".m4s" in url: if ".mp4" in url or ".ts" in url or ".m4s" in url:
let code = await proxyMedia(request, url) let code = await proxyMedia(request, url)
@@ -141,6 +122,6 @@ proc createMediaRouter*(cfg: Config) =
if ".m3u8" in url: if ".m3u8" in url:
let vid = await safeFetch(url) let vid = await safeFetch(url)
content = proxifyVideo(vid, requestPrefs().proxyVideos, url) content = proxifyVideo(vid, cookiePref(proxyVideos))
resp content, m3u8Mime resp content, m3u8Mime

View File

@@ -19,10 +19,8 @@ proc createPrefRouter*(cfg: Config) =
router preferences: router preferences:
get "/settings": get "/settings":
let let
prefs = requestPrefs() prefs = cookiePrefs()
prefsCode = encodePrefs(prefs) html = renderPreferences(prefs, refPath(), findThemes(cfg.staticDir))
prefsUrl = getUrlPrefix(cfg) & "/?prefs=" & prefsCode
html = renderPreferences(prefs, refPath(), findThemes(cfg.staticDir), prefsUrl)
resp renderMain(html, request, cfg, prefs, "Preferences") resp renderMain(html, request, cfg, prefs, "Preferences")
get "/settings/@i?": get "/settings/@i?":

View File

@@ -18,8 +18,8 @@ proc createResolverRouter*(cfg: Config) =
router resolver: router resolver:
get "/cards/@card/@id": get "/cards/@card/@id":
let url = "https://cards.twitter.com/cards/$1/$2" % [@"card", @"id"] let url = "https://cards.twitter.com/cards/$1/$2" % [@"card", @"id"]
respResolved(await resolve(url, requestPrefs()), "card") respResolved(await resolve(url, cookiePrefs()), "card")
get "/t.co/@url": get "/t.co/@url":
let url = "https://t.co/" & @"url" let url = "https://t.co/" & @"url"
respResolved(await resolve(url, requestPrefs()), "t.co") respResolved(await resolve(url, cookiePrefs()), "t.co")

View File

@@ -9,13 +9,21 @@ export utils, prefs, types, uri
template savePref*(pref, value: string; req: Request; expire=false) = template savePref*(pref, value: string; req: Request; expire=false) =
if not expire or pref in cookies(req): if not expire or pref in cookies(req):
setCookie(pref, value, daysForward(when expire: -10 else: 360), setCookie(pref, value, daysForward(when expire: -10 else: 360),
httpOnly=true, secure=cfg.useHttps, sameSite=None, path="/") httpOnly=true, secure=cfg.useHttps, sameSite=None)
template requestPrefs*(): untyped {.dirty.} = template cookiePrefs*(): untyped {.dirty.} =
getPrefs(cookies(request), params(request)) getPrefs(cookies(request))
template cookiePref*(pref): untyped {.dirty.} =
getPref(cookies(request), pref)
template themePrefs*(): Prefs =
var res = defaultPrefs
res.theme = cookiePref(theme)
res
template showError*(error: string; cfg: Config): string = template showError*(error: string; cfg: Config): string =
renderMain(renderError(error), request, cfg, requestPrefs(), "Error") renderMain(renderError(error), request, cfg, themePrefs(), "Error")
template getPath*(): untyped {.dirty.} = template getPath*(): untyped {.dirty.} =
$(parseUri(request.path) ? filterParams(request.params)) $(parseUri(request.path) ? filterParams(request.params))
@@ -35,28 +43,5 @@ template getCursor*(req: Request): string =
proc getNames*(name: string): seq[string] = proc getNames*(name: string): seq[string] =
name.strip(chars={'/'}).split(",").filterIt(it.len > 0) name.strip(chars={'/'}).split(",").filterIt(it.len > 0)
template applyUrlPrefs*() {.dirty.} =
if @"prefs".len > 0:
var prefParams = initTable[string, string]()
for pair in @"prefs".split(','):
let kv = pair.split('=', maxsplit=1)
if kv.len == 2:
prefParams[kv[0]] = kv[1]
elif kv.len == 1 and kv[0].len > 0:
prefParams[kv[0]] = ""
genApplyPrefs(prefParams, request)
# Rebuild URL without prefs param
var params: seq[(string, string)]
for k, v in request.params:
if k != "prefs":
params.add (k, v)
if params.len > 0:
let cleanUrl = request.getNativeReq.url ? params
redirect($cleanUrl)
else:
redirect(request.path)
template respJson*(node: JsonNode) = template respJson*(node: JsonNode) =
resp $node, "application/json" resp $node, "application/json"

View File

@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, tables, times, hashes, uri import asyncdispatch, strutils, tables, times, hashes, uri
import jester import jester
@@ -10,12 +10,7 @@ include "../views/rss.nimf"
export times, hashes export times, hashes
proc redisKey*(page, name, cursor: string): string = proc timelineRss*(req: Request; cfg: Config; query: Query): Future[Rss] {.async.} =
result = page & ":" & name
if cursor.len > 0:
result &= ":" & cursor
proc timelineRss*(req: Request; cfg: Config; query: Query; prefs: Prefs): Future[Rss] {.async.} =
var profile: Profile var profile: Profile
let let
name = req.params.getOrDefault("name") name = req.params.getOrDefault("name")
@@ -23,30 +18,32 @@ proc timelineRss*(req: Request; cfg: Config; query: Query; prefs: Prefs): Future
names = getNames(name) names = getNames(name)
if names.len == 1: if names.len == 1:
profile = await fetchProfile(after, query, skipRail=true) profile = await fetchProfile(after, query, skipRail=true, skipPinned=true)
else: else:
var q = query var q = query
q.fromUser = names q.fromUser = names
profile.tweets = await getGraphTweetSearch(q, after) profile = Profile(
# this is kinda dumb tweets: await getSearch[Tweet](q, after),
profile.user = User( # this is kinda dumb
username: name, user: User(
fullname: names.join(" | "), username: name,
userpic: "https://abs.twimg.com/sticky/default_profile_images/default_profile.png" fullname: names.join(" | "),
userpic: "https://abs.twimg.com/sticky/default_profile_images/default_profile.png"
)
) )
if profile.user.suspended: if profile.user.suspended:
return Rss(feed: profile.user.username, cursor: "suspended") return Rss(feed: profile.user.username, cursor: "suspended")
if profile.user.fullname.len > 0: if profile.user.fullname.len > 0:
let rss = renderTimelineRss(profile, cfg, prefs, multi=(names.len > 1)) let rss = renderTimelineRss(profile, cfg, multi=(names.len > 1))
return Rss(feed: rss, cursor: profile.tweets.bottom) return Rss(feed: rss, cursor: profile.tweets.bottom)
template respRss*(rss, page) = template respRss*(rss, page) =
if rss.cursor.len == 0: if rss.cursor.len == 0:
let info = case page let info = case page
of "User": " \"" & @"name" & "\" " of "User": " \"$1\" " % @"name"
of "List": " \"" & @"id" & "\" " of "List": " $1 " % @"id"
else: " " else: " "
resp Http404, showError(page & info & "not found", cfg) resp Http404, showError(page & info & "not found", cfg)
@@ -60,89 +57,75 @@ template respRss*(rss, page) =
proc createRssRouter*(cfg: Config) = proc createRssRouter*(cfg: Config) =
router rss: router rss:
get "/search/rss": get "/search/rss":
if not cfg.enableRSSSearch: cond cfg.enableRss
resp Http403, showError("RSS feed is disabled", cfg)
if @"q".len > 200: if @"q".len > 200:
resp Http400, showError("Search input too long.", cfg) resp Http400, showError("Search input too long.", cfg)
let let query = initQuery(params(request))
prefs = requestPrefs()
query = initQuery(params(request))
if query.kind != tweets: if query.kind != tweets:
resp Http400, showError("Only Tweet searches are allowed for RSS feeds.", cfg) resp Http400, showError("Only Tweet searches are allowed for RSS feeds.", cfg)
let let
cursor = getCursor() cursor = getCursor()
key = redisKey("search", $hash(genQueryUrl(query)), cursor) key = "search:" & $hash(genQueryUrl(query)) & ":" & cursor
var rss = await getCachedRss(key) var rss = await getCachedRss(key)
if rss.cursor.len > 0: if rss.cursor.len > 0:
respRss(rss, "Search") respRss(rss, "Search")
let tweets = await getGraphTweetSearch(query, cursor) let tweets = await getSearch[Tweet](query, cursor)
rss.cursor = tweets.bottom rss.cursor = tweets.bottom
rss.feed = renderSearchRss(tweets.content, query.text, genQueryUrl(query), cfg, prefs) rss.feed = renderSearchRss(tweets.content, query.text, genQueryUrl(query), cfg)
await cacheRss(key, rss) await cacheRss(key, rss)
respRss(rss, "Search") respRss(rss, "Search")
get "/@name/rss": get "/@name/rss":
cond cfg.enableRss
cond '.' notin @"name" cond '.' notin @"name"
if not cfg.enableRSSUserTweets:
resp Http403, showError("RSS feed is disabled", cfg)
let let
prefs = requestPrefs() cursor = getCursor()
name = @"name" name = @"name"
key = redisKey("twitter", name, getCursor()) key = "twitter:" & name & ":" & cursor
var rss = await getCachedRss(key) var rss = await getCachedRss(key)
if rss.cursor.len > 0: if rss.cursor.len > 0:
respRss(rss, "User") respRss(rss, "User")
rss = await timelineRss(request, cfg, Query(fromUser: @[name]), prefs) rss = await timelineRss(request, cfg, Query(fromUser: @[name]))
await cacheRss(key, rss) await cacheRss(key, rss)
respRss(rss, "User") respRss(rss, "User")
get "/@name/@tab/rss": get "/@name/@tab/rss":
cond cfg.enableRss
cond '.' notin @"name" cond '.' notin @"name"
cond @"tab" in ["with_replies", "media", "search"] cond @"tab" in ["with_replies", "media", "search"]
let rssEnabled = case @"tab" let name = @"name"
of "with_replies": cfg.enableRSSUserReplies let query =
of "media": cfg.enableRSSUserMedia case @"tab"
of "search": cfg.enableRSSSearch of "with_replies": getReplyQuery(name)
else: false of "media": getMediaQuery(name)
if not rssEnabled: of "search": initQuery(params(request), name=name)
resp Http403, showError("RSS feed is disabled", cfg) else: Query(fromUser: @[name])
let
prefs = requestPrefs()
name = @"name"
tab = @"tab"
query =
case tab
of "with_replies": getReplyQuery(name)
of "media": getMediaQuery(name)
of "search": initQuery(params(request), name=name)
else: Query(fromUser: @[name])
let searchKey = if tab != "search": "" var key = @"tab" & ":" & @"name" & ":"
else: ":" & $hash(genQueryUrl(query)) if @"tab" == "search":
key &= $hash(genQueryUrl(query)) & ":"
let key = redisKey(tab, name & searchKey, getCursor()) key &= getCursor()
var rss = await getCachedRss(key) var rss = await getCachedRss(key)
if rss.cursor.len > 0: if rss.cursor.len > 0:
respRss(rss, "User") respRss(rss, "User")
rss = await timelineRss(request, cfg, query, prefs) rss = await timelineRss(request, cfg, query)
await cacheRss(key, rss) await cacheRss(key, rss)
respRss(rss, "User") respRss(rss, "User")
get "/@name/lists/@slug/rss": get "/@name/lists/@slug/rss":
cond cfg.enableRss
cond @"name" != "i" cond @"name" != "i"
if not cfg.enableRSSList:
resp Http403, showError("RSS feed is disabled", cfg)
let let
slug = decodeUrl(@"slug") slug = decodeUrl(@"slug")
list = await getCachedList(@"name", slug) list = await getCachedList(@"name", slug)
@@ -158,23 +141,22 @@ proc createRssRouter*(cfg: Config) =
redirect(url) redirect(url)
get "/i/lists/@id/rss": get "/i/lists/@id/rss":
if not cfg.enableRSSList: cond cfg.enableRss
resp Http403, showError("RSS feed is disabled", cfg)
let let
prefs = requestPrefs()
id = @"id"
cursor = getCursor() cursor = getCursor()
key = redisKey("lists", id, cursor) key =
if cursor.len == 0: "lists:" & @"id"
else: "lists:" & @"id" & ":" & cursor
var rss = await getCachedRss(key) var rss = await getCachedRss(key)
if rss.cursor.len > 0: if rss.cursor.len > 0:
respRss(rss, "List") respRss(rss, "List")
let let
list = await getCachedList(id=id) list = await getCachedList(id=(@"id"))
timeline = await getGraphListTweets(list.id, cursor) timeline = await getListTimeline(list.id, cursor)
rss.cursor = timeline.bottom rss.cursor = timeline.bottom
rss.feed = renderListRss(timeline.content, list, cfg, prefs) rss.feed = renderListRss(timeline.content, list, cfg)
await cacheRss(key, rss) await cacheRss(key, rss)
respRss(rss, "List") respRss(rss, "List")

View File

@@ -14,38 +14,32 @@ export search
proc createSearchRouter*(cfg: Config) = proc createSearchRouter*(cfg: Config) =
router search: router search:
get "/search/?": get "/search/?":
let q = @"q" if @"q".len > 500:
if q.len > 500:
resp Http400, showError("Search input too long.", cfg) resp Http400, showError("Search input too long.", cfg)
let let
prefs = requestPrefs() prefs = cookiePrefs()
query = initQuery(params(request)) query = initQuery(params(request))
title = "Search" & (if q.len > 0: " (" & q & ")" else: "")
case query.kind case query.kind
of users: of users:
if "," in q: if "," in @"q":
redirect("/" & q) redirect("/" & @"q")
var users: Result[User] let users = await getSearch[User](query, getCursor())
try: resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs)
users = await getGraphUserSearch(query, getCursor())
except InternalError:
users = Result[User](beginning: true, query: query)
resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs, title)
of tweets: of tweets:
let let
tweets = await getGraphTweetSearch(query, getCursor()) tweets = await getSearch[Tweet](query, getCursor())
rss = if cfg.enableRSSSearch: "/search/rss?" & genQueryUrl(query) else: "" rss = "/search/rss?" & genQueryUrl(query)
resp renderMain(renderTweetSearch(tweets, prefs, getPath()), resp renderMain(renderTweetSearch(tweets, prefs, getPath()),
request, cfg, prefs, title, rss=rss) request, cfg, prefs, rss=rss)
else: else:
resp Http404, showError("Invalid search", cfg) resp Http404, showError("Invalid search", cfg)
get "/hashtag/@hash": get "/hashtag/@hash":
redirect("/search?f=tweets&q=" & encodeUrl("#" & @"hash")) redirect("/search?q=" & encodeUrl("#" & @"hash"))
get "/opensearch": get "/opensearch":
let url = getUrlPrefix(cfg) & "/search?f=tweets&q=" let url = getUrlPrefix(cfg) & "/search?q="
resp Http200, {"Content-Type": "application/opensearchdescription+xml"}, resp Http200, {"Content-Type": "application/opensearchdescription+xml"},
generateOpenSearchXML(cfg.title, cfg.hostname, url) generateOpenSearchXML(cfg.title, cfg.hostname, url)

View File

@@ -16,21 +16,19 @@ proc createStatusRouter*(cfg: Config) =
router status: router status:
get "/@name/status/@id/?": get "/@name/status/@id/?":
cond '.' notin @"name" cond '.' notin @"name"
let id = @"id" cond not @"id".any(c => not c.isDigit)
let prefs = cookiePrefs()
if id.len > 19 or id.any(c => not c.isDigit):
resp Http404, showError("Invalid tweet ID", cfg)
let prefs = requestPrefs()
# used for the infinite scroll feature # used for the infinite scroll feature
if @"scroll".len > 0: if @"scroll".len > 0:
let replies = await getReplies(id, getCursor()) let replies = await getReplies(@"id", getCursor())
if replies.content.len == 0: if replies.content.len == 0:
resp Http204 resp Http404, ""
resp $renderReplies(replies, prefs, getPath()) resp $renderReplies(replies, prefs, getPath())
let conv = await getTweet(id, getCursor()) let conv = await getTweet(@"id", getCursor())
if conv == nil:
echo "nil conv"
if conv == nil or conv.tweet == nil or conv.tweet.id == 0: if conv == nil or conv.tweet == nil or conv.tweet.id == 0:
var error = "Tweet not found" var error = "Tweet not found"
@@ -44,19 +42,15 @@ proc createStatusRouter*(cfg: Config) =
desc = conv.tweet.text desc = conv.tweet.text
var var
images = conv.tweet.getPhotos.mapIt(it.url) images = conv.tweet.photos
video = "" video = ""
let if conv.tweet.video.isSome():
firstMediaKind = if conv.tweet.media.len > 0: conv.tweet.media[0].kind images = @[get(conv.tweet.video).thumb]
else: photoMedia
if firstMediaKind == videoMedia:
images = @[conv.tweet.media[0].getThumb]
video = getVideoEmbed(cfg, conv.tweet.id) video = getVideoEmbed(cfg, conv.tweet.id)
elif firstMediaKind == gifMedia: elif conv.tweet.gif.isSome():
images = @[conv.tweet.media[0].getThumb] images = @[get(conv.tweet.gif).thumb]
video = getPicUrl(conv.tweet.media[0].gif.url) video = getPicUrl(get(conv.tweet.gif).url)
elif conv.tweet.card.isSome(): elif conv.tweet.card.isSome():
let card = conv.tweet.card.get() let card = conv.tweet.card.get()
if card.image.len > 0: if card.image.len > 0:
@@ -68,26 +62,6 @@ proc createStatusRouter*(cfg: Config) =
resp renderMain(html, request, cfg, prefs, title, desc, ogTitle, resp renderMain(html, request, cfg, prefs, title, desc, ogTitle,
images=images, video=video) images=images, video=video)
get "/@name/status/@id/history/?":
cond '.' notin @"name"
let id = @"id"
if id.len > 19 or id.any(c => not c.isDigit):
resp Http404, showError("Invalid tweet ID", cfg)
let edits = await getGraphEditHistory(id)
if edits.latest == nil or edits.latest.id == 0:
resp Http404, showError("Tweet history not found", cfg)
let
prefs = requestPrefs()
title = "History for " & pageTitle(edits.latest)
ogTitle = "Edit History for " & pageTitle(edits.latest.user)
desc = edits.latest.text
let html = renderEditHistory(edits, prefs, getPath())
resp renderMain(html, request, cfg, prefs, title, desc, ogTitle)
get "/@name/@s/@id/@m/?@i?": get "/@name/@s/@id/@m/?@i?":
cond @"s" in ["status", "statuses"] cond @"s" in ["status", "statuses"]
cond @"m" in ["video", "photo"] cond @"m" in ["video", "photo"]
@@ -98,6 +72,3 @@ proc createStatusRouter*(cfg: Config) =
get "/i/web/status/@id": get "/i/web/status/@id":
redirect("/i/status/" & @"id") redirect("/i/status/" & @"id")
get "/@name/thread/@id/?":
redirect("/$1/status/$2" % [@"name", @"id"])

View File

@@ -4,28 +4,20 @@ import jester, karax/vdom
import router_utils import router_utils
import ".."/[types, redis_cache, formatters, query, api] import ".."/[types, redis_cache, formatters, query, api]
import ../views/[general, profile, timeline, status, search, about_account] import ../views/[general, profile, timeline, status, search]
export vdom export vdom
export uri, sequtils export uri, sequtils
export router_utils export router_utils
export redis_cache, formatters, query, api export redis_cache, formatters, query, api
export profile, timeline, status, about_account export profile, timeline, status
proc getQuery*(request: Request; tab, name: string; prefs: Prefs): Query = proc getQuery*(request: Request; tab, name: string): Query =
let view = request.params.getOrDefault("view")
case tab case tab
of "with_replies": of "with_replies": getReplyQuery(name)
result = getReplyQuery(name) of "media": getMediaQuery(name)
of "media": of "search": initQuery(params(request), name=name)
result = getMediaQuery(name) else: Query(fromUser: @[name])
result.view =
if view in ["timeline", "grid", "gallery"]: view
else: prefs.mediaView.toLowerAscii
of "search":
result = initQuery(params(request), name=name)
else:
result = Query(fromUser: @[name])
template skipIf[T](cond: bool; default; body: Future[T]): Future[T] = template skipIf[T](cond: bool; default; body: Future[T]): Future[T] =
if cond: if cond:
@@ -35,7 +27,8 @@ template skipIf[T](cond: bool; default; body: Future[T]): Future[T] =
else: else:
body body
proc fetchProfile*(after: string; query: Query; skipRail=false): Future[Profile] {.async.} = proc fetchProfile*(after: string; query: Query; skipRail=false;
skipPinned=false): Future[Profile] {.async.} =
let let
name = query.fromUser[0] name = query.fromUser[0]
userId = await getUserId(name) userId = await getUserId(name)
@@ -52,23 +45,36 @@ proc fetchProfile*(after: string; query: Query; skipRail=false): Future[Profile]
after.setLen 0 after.setLen 0
let let
timeline =
case query.kind
of posts: getTimeline(userId, after)
of replies: getTimeline(userId, after, replies=true)
of media: getMediaTimeline(userId, after)
else: getSearch[Tweet](query, after)
rail = rail =
skipIf(skipRail or query.kind == media, @[]): skipIf(skipRail or query.kind == media, @[]):
getCachedPhotoRail(userId) getCachedPhotoRail(name)
user = getCachedUser(name) user = await getCachedUser(name)
info = getCachedAccountInfo(name, fetch=false)
result = var pinned: Option[Tweet]
case query.kind if not skipPinned and user.pinnedTweet > 0 and
of posts: await getGraphUserTweets(userId, TimelineKind.tweets, after) after.len == 0 and query.kind in {posts, replies}:
of replies: await getGraphUserTweets(userId, TimelineKind.replies, after) let tweet = await getCachedTweet(user.pinnedTweet)
of media: await getGraphUserTweets(userId, TimelineKind.media, after) if not tweet.isNil:
else: Profile(tweets: await getGraphTweetSearch(query, after)) tweet.pinned = true
pinned = some tweet
result.user = await user result = Profile(
result.photoRail = await rail user: user,
result.accountInfo = await info pinned: pinned,
tweets: await timeline,
photoRail: await rail
)
if result.user.protected or result.user.suspended:
return
result.tweets.query = query result.tweets.query = query
@@ -76,11 +82,11 @@ proc showTimeline*(request: Request; query: Query; cfg: Config; prefs: Prefs;
rss, after: string): Future[string] {.async.} = rss, after: string): Future[string] {.async.} =
if query.fromUser.len != 1: if query.fromUser.len != 1:
let let
timeline = await getGraphTweetSearch(query, after) timeline = await getSearch[Tweet](query, after)
html = renderTweetSearch(timeline, prefs, getPath()) html = renderTweetSearch(timeline, prefs, getPath())
return renderMain(html, request, cfg, prefs, "Multi", rss=rss) return renderMain(html, request, cfg, prefs, "Multi", rss=rss)
var profile = await fetchProfile(after, query) var profile = await fetchProfile(after, query, skipPinned=prefs.hidePins)
template u: untyped = profile.user template u: untyped = profile.user
if u.suspended: if u.suspended:
@@ -115,46 +121,24 @@ proc createTimelineRouter*(cfg: Config) =
get "/intent/user": get "/intent/user":
respUserId() respUserId()
get "/intent/follow/?":
let username = request.params.getOrDefault("screen_name")
if username.len == 0:
resp Http400, showError("Missing screen_name parameter", cfg)
redirect("/" & username)
get "/@name/about/?":
cond @"name".allCharsInSet({'a'..'z', 'A'..'Z', '0'..'9', '_'})
let
prefs = requestPrefs()
name = @"name"
info = await getCachedAccountInfo(name)
if info.suspended:
resp showError(getSuspended(name), cfg)
if info.username.len == 0:
resp Http404, showError("User \"" & name & "\" not found", cfg)
let aboutHtml = renderAboutAccount(info)
resp renderMain(aboutHtml, request, cfg, prefs,
"About @" & info.username)
get "/@name/?@tab?/?": get "/@name/?@tab?/?":
cond '.' notin @"name" cond '.' notin @"name"
cond @"name" notin ["pic", "gif", "video", "search", "settings", "login", "intent", "i"] cond @"name" notin ["pic", "gif", "video"]
cond @"name".allCharsInSet({'a'..'z', 'A'..'Z', '0'..'9', '_', ','})
cond @"tab" in ["with_replies", "media", "search", ""] cond @"tab" in ["with_replies", "media", "search", ""]
let let
prefs = requestPrefs() prefs = cookiePrefs()
after = getCursor() after = getCursor()
names = getNames(@"name") names = getNames(@"name")
var query = request.getQuery(@"tab", @"name", prefs) var query = request.getQuery(@"tab", @"name")
if names.len != 1: if names.len != 1:
query.fromUser = names query.fromUser = names
# used for the infinite scroll feature # used for the infinite scroll feature
if @"scroll".len > 0: if @"scroll".len > 0:
if query.fromUser.len != 1: if query.fromUser.len != 1:
var timeline = await getGraphTweetSearch(query, after) var timeline = await getSearch[Tweet](query, after)
if timeline.content.len == 0: if timeline.content.len == 0: resp Http404
resp Http204
timeline.beginning = true timeline.beginning = true
resp $renderTweetSearch(timeline, prefs, getPath()) resp $renderTweetSearch(timeline, prefs, getPath())
else: else:
@@ -163,17 +147,8 @@ proc createTimelineRouter*(cfg: Config) =
profile.tweets.beginning = true profile.tweets.beginning = true
resp $renderTimelineTweets(profile.tweets, prefs, getPath()) resp $renderTimelineTweets(profile.tweets, prefs, getPath())
let rssEnabled =
if @"tab".len == 0: cfg.enableRSSUserTweets
elif @"tab" == "with_replies": cfg.enableRSSUserReplies
elif @"tab" == "media": cfg.enableRSSUserMedia
elif @"tab" == "search": cfg.enableRSSSearch
else: false
let rss = let rss =
if not rssEnabled: if @"tab".len == 0:
""
elif @"tab".len == 0:
"/$1/rss" % @"name" "/$1/rss" % @"name"
elif @"tab" == "search": elif @"tab" == "search":
"/$1/search/rss?$2" % [@"name", genQueryUrl(query)] "/$1/search/rss?$2" % [@"name", genQueryUrl(query)]

View File

@@ -10,14 +10,14 @@ export feature
proc createUnsupportedRouter*(cfg: Config) = proc createUnsupportedRouter*(cfg: Config) =
router unsupported: router unsupported:
template feature {.dirty.} = template feature {.dirty.} =
resp renderMain(renderFeature(), request, cfg, requestPrefs()) resp renderMain(renderFeature(), request, cfg, themePrefs())
get "/about/feature": feature() get "/about/feature": feature()
get "/login/?@i?": feature() get "/login/?@i?": feature()
get "/@name/lists/?": feature() get "/@name/lists/?": feature()
get "/intent/?@i?": get "/intent/?@i?":
cond @"i" notin ["user", "follow"] cond @"i" notin ["user"]
feature() feature()
get "/i/@i?/?@j?": get "/i/@i?/?@j?":

View File

@@ -1,75 +0,0 @@
.broadcast-page {
max-width: 800px;
width: 100%;
margin: 20px auto 0;
}
.broadcast-panel {
background-color: var(--bg_panel);
border: 1px solid var(--border_grey);
border-radius: 8px;
overflow: hidden;
}
.broadcast-player {
position: relative;
background: black;
video,
img {
display: block;
width: 100%;
}
}
.broadcast-info {
padding: 14px 16px;
}
.broadcast-title {
font-size: 18px;
font-weight: bold;
margin: 0 0 12px;
}
.broadcast-user-row {
display: flex;
align-items: center;
justify-content: space-between;
}
.broadcast-user {
display: flex;
align-items: center;
gap: 10px;
color: var(--fg_color);
img {
width: 40px;
height: 40px;
border-radius: 50%;
}
}
.broadcast-username {
color: var(--fg_dark);
}
.broadcast-meta {
color: var(--fg_faded);
font-size: 14px;
display: flex;
flex-direction: column;
align-items: flex-end;
flex-shrink: 0;
line-height: 1.5em;
}
.broadcast-live {
background: #e0245e;
color: white;
padding: 1px 6px;
border-radius: 3px;
font-weight: bold;
font-size: 12px;
}

View File

@@ -1,40 +1,39 @@
@import "_variables"; @import '_variables';
@import "_mixins"; @import '_mixins';
.panel-container { .panel-container {
margin: auto; margin: auto;
font-size: 130%; font-size: 130%;
} }
.error-panel { .error-panel {
@include center-panel(var(--error_red)); @include center-panel(var(--error_red));
text-align: center; text-align: center;
} }
.search-bar > form { .search-bar > form {
@include center-panel(var(--darkest_grey)); @include center-panel(var(--darkest_grey));
button { button {
background: var(--bg_elements); background: var(--bg_elements);
color: var(--fg_color); color: var(--fg_color);
border: 0; border: 0;
border-radius: 3px; border-radius: 3px;
cursor: pointer; cursor: pointer;
font-weight: bold; font-weight: bold;
width: 30px; width: 30px;
height: 30px; height: 30px;
padding: 0px 5px 1px 8px; }
}
input { input {
font-size: 16px; font-size: 16px;
width: 100%; width: 100%;
background: var(--bg_elements); background: var(--bg_elements);
color: var(--fg_color); color: var(--fg_color);
border: 0; border: 0;
border-radius: 4px; border-radius: 4px;
padding: 4px; padding: 4px;
margin-right: 8px; margin-right: 8px;
height: unset; height: unset;
} }
} }

View File

@@ -66,7 +66,18 @@
} }
#search-panel-toggle:checked ~ .search-panel { #search-panel-toggle:checked ~ .search-panel {
max-height: 380px !important; @if $rows == 6 {
max-height: 200px !important;
}
@if $rows == 5 {
max-height: 300px !important;
}
@if $rows == 4 {
max-height: 300px !important;
}
@if $rows == 3 {
max-height: 365px !important;
}
} }
} }
} }

View File

@@ -1,43 +1,44 @@
// colors // colors
$bg_color: #0f0f0f; $bg_color: #0F0F0F;
$fg_color: #f8f8f2; $fg_color: #F8F8F2;
$fg_faded: #f8f8f2cf; $fg_faded: #F8F8F2CF;
$fg_dark: #ff6c60; $fg_dark: #FF6C60;
$fg_nav: #ff6c60; $fg_nav: #FF6C60;
$bg_panel: #161616; $bg_panel: #161616;
$bg_elements: #121212; $bg_elements: #121212;
$bg_overlays: #1f1f1f; $bg_overlays: #1F1F1F;
$bg_hover: #1a1a1a; $bg_hover: #1A1A1A;
$grey: #888889; $grey: #888889;
$dark_grey: #404040; $dark_grey: #404040;
$darker_grey: #282828; $darker_grey: #282828;
$darkest_grey: #222222; $darkest_grey: #222222;
$border_grey: #3e3e35; $border_grey: #3E3E35;
$accent: #ff6c60; $accent: #FF6C60;
$accent_light: #ffaca0; $accent_light: #FFACA0;
$accent_dark: #8a3731; $accent_dark: #8A3731;
$accent_border: #ff6c6091; $accent_border: #FF6C6091;
$play_button: #d8574d; $play_button: #D8574D;
$play_button_hover: #ff6c60; $play_button_hover: #FF6C60;
$more_replies_dots: #ad433b; $more_replies_dots: #AD433B;
$error_red: #420a05; $error_red: #420A05;
$verified_blue: #1da1f2; $verified_blue: #1DA1F2;
$verified_business: #fac82b;
$verified_government: #c1b6a4;
$icon_text: $fg_color; $icon_text: $fg_color;
$tab: $fg_color; $tab: $fg_color;
$tab_selected: $accent; $tab_selected: $accent;
$shadow: rgba(0, 0, 0, 0.6); $shadow: rgba(0,0,0,.6);
$shadow_dark: rgba(0, 0, 0, 0.2); $shadow_dark: rgba(0,0,0,.2);
//fonts //fonts
$font_0: sans-serif; $font_0: Helvetica Neue;
$font_1: fontello; $font_1: Helvetica;
$font_2: Arial;
$font_3: sans-serif;
$font_4: fontello;

View File

@@ -1,217 +1,165 @@
@import "_variables"; @import '_variables';
@import "tweet/_base"; @import 'tweet/_base';
@import "profile/_base"; @import 'profile/_base';
@import "general"; @import 'general';
@import "navbar"; @import 'navbar';
@import "inputs"; @import 'inputs';
@import "timeline"; @import 'timeline';
@import "search"; @import 'search';
@import "broadcast";
body { body {
// colors // colors
--bg_color: #{$bg_color}; --bg_color: #{$bg_color};
--fg_color: #{$fg_color}; --fg_color: #{$fg_color};
--fg_faded: #{$fg_faded}; --fg_faded: #{$fg_faded};
--fg_dark: #{$fg_dark}; --fg_dark: #{$fg_dark};
--fg_nav: #{$fg_nav}; --fg_nav: #{$fg_nav};
--bg_panel: #{$bg_panel}; --bg_panel: #{$bg_panel};
--bg_elements: #{$bg_elements}; --bg_elements: #{$bg_elements};
--bg_overlays: #{$bg_overlays}; --bg_overlays: #{$bg_overlays};
--bg_hover: #{$bg_hover}; --bg_hover: #{$bg_hover};
--grey: #{$grey}; --grey: #{$grey};
--dark_grey: #{$dark_grey}; --dark_grey: #{$dark_grey};
--darker_grey: #{$darker_grey}; --darker_grey: #{$darker_grey};
--darkest_grey: #{$darkest_grey}; --darkest_grey: #{$darkest_grey};
--border_grey: #{$border_grey}; --border_grey: #{$border_grey};
--accent: #{$accent}; --accent: #{$accent};
--accent_light: #{$accent_light}; --accent_light: #{$accent_light};
--accent_dark: #{$accent_dark}; --accent_dark: #{$accent_dark};
--accent_border: #{$accent_border}; --accent_border: #{$accent_border};
--play_button: #{$play_button}; --play_button: #{$play_button};
--play_button_hover: #{$play_button_hover}; --play_button_hover: #{$play_button_hover};
--more_replies_dots: #{$more_replies_dots}; --more_replies_dots: #{$more_replies_dots};
--error_red: #{$error_red}; --error_red: #{$error_red};
--verified_blue: #{$verified_blue}; --verified_blue: #{$verified_blue};
--verified_business: #{$verified_business}; --icon_text: #{$icon_text};
--verified_government: #{$verified_government};
--icon_text: #{$icon_text};
--tab: #{$fg_color}; --tab: #{$fg_color};
--tab_selected: #{$accent}; --tab_selected: #{$accent};
--profile_stat: #{$fg_color}; --profile_stat: #{$fg_color};
background-color: var(--bg_color); background-color: var(--bg_color);
color: var(--fg_color); color: var(--fg_color);
font-family: $font_0, $font_1; font-family: $font_0, $font_1, $font_2, $font_3;
font-size: 15px; font-size: 14px;
line-height: 1.3; line-height: 1.3;
margin: 0; margin: 0;
} }
* { * {
outline: unset; outline: unset;
margin: 0; margin: 0;
text-decoration: none; text-decoration: none;
}
img {
dynamic-range-limit: standard;
} }
h1 { h1 {
display: inline; display: inline;
} }
h2, h2, h3 {
h3 { font-weight: normal;
font-weight: normal;
} }
p { p {
margin: 14px 0; margin: 14px 0;
} }
a { a {
color: var(--accent); color: var(--accent);
&:hover { &:hover {
text-decoration: underline; text-decoration: underline;
} }
} }
fieldset { fieldset {
border: 0; border: 0;
padding: 0; padding: 0;
margin-top: -0.6em; margin-top: -0.6em;
} }
legend { legend {
width: 100%; width: 100%;
padding: 0.6em 0 0.3em 0; padding: .6em 0 .3em 0;
border: 0; border: 0;
font-size: 16px; font-size: 16px;
font-weight: 600; font-weight: 600;
border-bottom: 1px solid var(--border_grey); border-bottom: 1px solid var(--border_grey);
margin-bottom: 8px; margin-bottom: 8px;
} }
.preferences { .preferences .note {
.note {
border-top: 1px solid var(--border_grey); border-top: 1px solid var(--border_grey);
border-bottom: 1px solid var(--border_grey); border-bottom: 1px solid var(--border_grey);
padding: 6px 0 8px 0; padding: 6px 0 8px 0;
margin-bottom: 8px; margin-bottom: 8px;
margin-top: 16px; margin-top: 16px;
}
.bookmark-note {
margin: 0;
margin-bottom: 10px;
}
} }
ul { ul {
padding-left: 1.3em; padding-left: 1.3em;
} }
.container { .container {
display: flex; display: flex;
flex-wrap: wrap; flex-wrap: wrap;
box-sizing: border-box; box-sizing: border-box;
margin: auto; padding-top: 50px;
min-height: 100vh; margin: auto;
} min-height: 100vh;
body.fixed-nav .container {
padding-top: 50px;
} }
.icon-container { .icon-container {
display: inline; display: inline;
} }
.overlay-panel { .overlay-panel {
max-width: 600px; max-width: 600px;
width: 100%; width: 100%;
margin: 0 auto; margin: 0 auto;
margin-top: 10px; margin-top: 10px;
background-color: var(--bg_overlays); background-color: var(--bg_overlays);
padding: 10px 15px; padding: 10px 15px;
align-self: start; align-self: start;
ul { ul {
margin-bottom: 14px; margin-bottom: 14px;
} }
p { p {
word-break: break-word; word-break: break-word;
} }
} }
.verified-icon { .verified-icon {
display: inline-block; color: var(--icon_text);
width: 14px; background-color: var(--verified_blue);
height: 14px; border-radius: 50%;
margin-bottom: 2px; flex-shrink: 0;
margin: 2px 0 3px 3px;
.verified-icon-circle { padding-top: 2px;
position: absolute; height: 12px;
font-size: 15px; width: 14px;
} font-size: 8px;
display: inline-block;
.verified-icon-check { text-align: center;
position: absolute; vertical-align: middle;
font-size: 9px;
margin: 5px 3px;
}
&.blue {
.verified-icon-circle {
color: var(--verified_blue);
}
.verified-icon-check {
color: var(--icon_text);
}
}
&.business {
.verified-icon-circle {
color: var(--verified_business);
}
.verified-icon-check {
color: var(--bg_panel);
}
}
&.government {
.verified-icon-circle {
color: var(--verified_government);
}
.verified-icon-check {
color: var(--bg_panel);
}
}
} }
@media (max-width: 600px) { @media(max-width: 600px) {
.preferences-container { .preferences-container {
max-width: 95vw; max-width: 95vw;
} }
.nav-item, .nav-item, .nav-item .icon-container {
.nav-item .icon-container { font-size: 16px;
font-size: 16px; }
}
} }

View File

@@ -1,216 +1,185 @@
@import "_variables"; @import '_variables';
@import "_mixins"; @import '_mixins';
button { button {
@include input-colors; @include input-colors;
background-color: var(--bg_elements); background-color: var(--bg_elements);
color: var(--fg_color); color: var(--fg_color);
border: 1px solid var(--accent_border); border: 1px solid var(--accent_border);
padding: 3px 6px; padding: 3px 6px;
font-size: 14px; font-size: 14px;
cursor: pointer; cursor: pointer;
float: right; float: right;
} }
input[type="text"], input[type="text"],
input[type="date"], input[type="date"],
input[type="number"],
select { select {
@include input-colors; @include input-colors;
background-color: var(--bg_elements); background-color: var(--bg_elements);
padding: 1px 4px; padding: 1px 4px;
color: var(--fg_color); color: var(--fg_color);
border: 1px solid var(--accent_border); border: 1px solid var(--accent_border);
border-radius: 0; border-radius: 0;
font-size: 14px; font-size: 14px;
} }
input[type="number"] { input[type="text"] {
-moz-appearance: textfield; height: 16px;
}
input[type="text"],
input[type="number"] {
height: 16px;
} }
select { select {
height: 20px; height: 20px;
padding: 0 2px; padding: 0 2px;
line-height: 1; line-height: 1;
} }
input[type="date"]::-webkit-inner-spin-button { input[type="date"]::-webkit-inner-spin-button {
display: none; display: none;
}
input[type="number"] {
-moz-appearance: textfield;
}
input[type="number"]::-webkit-inner-spin-button,
input[type="number"]::-webkit-outer-spin-button {
display: none;
-webkit-appearance: none;
margin: 0;
} }
input[type="date"]::-webkit-clear-button { input[type="date"]::-webkit-clear-button {
margin-left: 17px; margin-left: 17px;
filter: grayscale(100%); filter: grayscale(100%);
filter: hue-rotate(120deg); filter: hue-rotate(120deg);
} }
input::-webkit-calendar-picker-indicator { input::-webkit-calendar-picker-indicator {
opacity: 0; opacity: 0;
} }
input::-webkit-datetime-edit-day-field:focus, input::-webkit-datetime-edit-day-field:focus,
input::-webkit-datetime-edit-month-field:focus, input::-webkit-datetime-edit-month-field:focus,
input::-webkit-datetime-edit-year-field:focus { input::-webkit-datetime-edit-year-field:focus {
background-color: var(--accent); background-color: var(--accent);
color: var(--fg_color); color: var(--fg_color);
outline: none; outline: none;
} }
.date-range { .date-range {
.date-input { .date-input {
display: inline-block; display: inline-block;
position: relative; position: relative;
} }
.icon-container { .icon-container {
pointer-events: none; pointer-events: none;
position: absolute; position: absolute;
top: 2px; top: 2px;
right: 5px; right: 5px;
} }
.search-title { .search-title {
margin: 0 2px; margin: 0 2px;
} }
} }
.icon-button button { .icon-button button {
color: var(--accent); color: var(--accent);
text-decoration: none; text-decoration: none;
background: none; background: none;
border: none; border: none;
float: none; float: none;
padding: unset; padding: unset;
padding-left: 4px; padding-left: 4px;
&:hover { &:hover {
color: var(--accent_light); color: var(--accent_light);
} }
} }
.checkbox { .checkbox {
position: absolute;
top: 1px;
right: 0;
height: 17px;
width: 17px;
background-color: var(--bg_elements);
border: 1px solid var(--accent_border);
&:after {
content: "";
position: absolute; position: absolute;
display: none; top: 1px;
} right: 0;
height: 17px;
width: 17px;
background-color: var(--bg_elements);
border: 1px solid var(--accent_border);
&:after {
content: "";
position: absolute;
display: none;
}
} }
.checkbox-container { .checkbox-container {
display: block; display: block;
position: relative; position: relative;
margin-bottom: 5px; margin-bottom: 5px;
cursor: pointer;
user-select: none;
padding-right: 22px;
input {
position: absolute;
opacity: 0;
cursor: pointer; cursor: pointer;
height: 0; user-select: none;
width: 0; padding-right: 22px;
&:checked ~ .checkbox:after { input {
display: block; position: absolute;
opacity: 0;
cursor: pointer;
height: 0;
width: 0;
&:checked ~ .checkbox:after {
display: block;
}
} }
}
&:hover input ~ .checkbox { &:hover input ~ .checkbox {
border-color: var(--accent); border-color: var(--accent);
} }
&:active input ~ .checkbox { &:active input ~ .checkbox {
border-color: var(--accent_light); border-color: var(--accent_light);
} }
.checkbox:after { .checkbox:after {
left: 2px; left: 2px;
bottom: 0; bottom: 0;
font-size: 13px; font-size: 13px;
font-family: $font_1; font-family: $font_4;
content: "\e811"; content: '\e803';
} }
} }
.pref-group { .pref-group {
display: inline; display: inline;
} }
.preferences { .preferences {
button { button {
margin: 6px 0 3px 0; margin: 6px 0 3px 0;
} }
label { label {
padding-right: 150px; padding-right: 150px;
} }
select { select {
position: absolute; position: absolute;
top: 0; top: 0;
right: 0; right: 0;
display: block; display: block;
-moz-appearance: none; -moz-appearance: none;
-webkit-appearance: none; -webkit-appearance: none;
appearance: none; appearance: none;
min-width: 100px; }
}
input[type="text"], input[type="text"] {
input[type="number"] { position: absolute;
position: absolute; right: 0;
right: 0; max-width: 140px;
max-width: 140px; }
}
.pref-group { .pref-group {
display: block; display: block;
} }
.pref-input { .pref-input {
position: relative; position: relative;
margin-bottom: 6px; margin-bottom: 6px;
} }
.pref-reset { .pref-reset {
float: left; float: left;
} }
.prefs-code {
background-color: var(--bg_elements);
border: 1px solid var(--accent_border);
color: var(--fg_color);
font-size: 13px;
padding: 6px 8px;
margin: 4px 0;
word-break: break-all;
white-space: pre-wrap;
user-select: all;
}
} }

View File

@@ -1,90 +1,88 @@
@import "_variables"; @import '_variables';
nav { nav {
display: flex; display: flex;
align-items: center; align-items: center;
background-color: var(--bg_overlays);
box-shadow: 0 0 4px $shadow;
padding: 0;
width: 100%;
height: 50px;
z-index: 1000;
font-size: 16px;
a,
.icon-button button {
color: var(--fg_nav);
}
body.fixed-nav & {
position: fixed; position: fixed;
} background-color: var(--bg_overlays);
box-shadow: 0 0 4px $shadow;
padding: 0;
width: 100%;
height: 50px;
z-index: 1000;
font-size: 16px;
a, .icon-button button {
color: var(--fg_nav);
}
} }
.inner-nav { .inner-nav {
margin: auto; margin: auto;
box-sizing: border-box; box-sizing: border-box;
padding: 0 10px; padding: 0 10px;
display: flex; display: flex;
align-items: center; align-items: center;
flex-basis: 920px; flex-basis: 920px;
height: 50px; height: 50px;
} }
.site-name { .site-name {
font-size: 15px; font-size: 15px;
font-weight: 600; font-weight: 600;
line-height: 1; line-height: 1;
&:hover { &:hover {
color: var(--accent_light); color: var(--accent_light);
text-decoration: unset; text-decoration: unset;
} }
} }
.site-logo { .site-logo {
display: block; display: block;
width: 35px; width: 35px;
height: 35px; height: 35px;
} }
.nav-item { .nav-item {
display: flex; display: flex;
flex: 1; flex: 1;
line-height: 50px; line-height: 50px;
height: 50px; height: 50px;
overflow: hidden; overflow: hidden;
flex-wrap: wrap; flex-wrap: wrap;
align-items: center; align-items: center;
&.right { &.right {
text-align: right; text-align: right;
justify-content: flex-end; justify-content: flex-end;
} }
&.right a:hover { &.right a {
color: var(--accent_light); padding-left: 4px;
text-decoration: unset;
} &:hover {
color: var(--accent_light);
text-decoration: unset;
}
}
} }
.lp { .lp {
height: 14px; height: 14px;
display: inline-block; margin-top: 2px;
position: relative; display: block;
top: 2px; fill: var(--fg_nav);
fill: var(--fg_nav);
&:hover { &:hover {
fill: var(--accent_light); fill: var(--accent_light);
} }
} }
.icon-info { .icon-info:before {
margin: 0 -3px; margin: 0 -3px;
} }
.icon-cog { .icon-cog {
font-size: 15px; font-size: 15px;
padding-left: 0 !important;
} }

View File

@@ -1,117 +1,83 @@
@import "_variables"; @import '_variables';
@import "_mixins"; @import '_mixins';
@import "card"; @import 'card';
@import "about-account"; @import 'photo-rail';
@import "photo-rail";
.profile-tabs { .profile-tabs {
@include panel(auto, 900px); @include panel(auto, 900px);
.timeline-container { .timeline-container {
float: right; float: right;
width: 68% !important; width: 68% !important;
max-width: unset; max-width: unset;
} }
} }
.profile-banner { .profile-banner {
margin-bottom: 4px; margin-bottom: 4px;
background-color: var(--bg_panel); background-color: var(--bg_panel);
a { a {
display: block; display: block;
position: relative; position: relative;
padding: 33.34% 0 0 0; padding: 33.34% 0 0 0;
} }
img { img {
max-width: 100%; max-width: 100%;
position: absolute; position: absolute;
top: 0; top: 0;
} }
} }
.profile-tab { .profile-tab {
padding: 0 4px 0 0; padding: 0 4px 0 0;
box-sizing: border-box; box-sizing: border-box;
display: inline-block; display: inline-block;
font-size: 14px; font-size: 14px;
text-align: left; text-align: left;
vertical-align: top; vertical-align: top;
max-width: 32%; max-width: 32%;
top: 0;
body.fixed-nav & {
top: 50px; top: 50px;
}
} }
.profile-result { .profile-result {
min-height: 54px; min-height: 54px;
.username { .username {
margin: 0 !important; margin: 0 !important;
} }
.tweet-header { .tweet-header {
margin-bottom: unset; margin-bottom: unset;
} }
} }
.profile-tabs.media-only { @media(max-width: 700px) {
max-width: none; .profile-tabs {
width: 100%; width: 100vw;
max-width: 600px;
.timeline-container { .timeline-container {
float: none; width: 100% !important;
width: 100% !important;
max-width: none;
padding: 0 10px;
box-sizing: border-box;
}
.timeline-container > .tab { .tab-item wide {
max-width: 900px; flex-grow: 1.4;
margin-left: auto; }
margin-right: auto; }
}
}
@media (max-width: 700px) {
.profile-tabs {
width: 100vw;
max-width: 600px;
.timeline-container {
width: 100% !important;
.tab-item wide {
flex-grow: 1.4;
}
} }
}
.profile-tabs.media-only { .profile-tab {
width: 100%; width: 100%;
max-width: none; max-width: unset;
position: initial !important;
.timeline-container { padding: 0;
width: 100vw !important;
padding: 0;
} }
}
.profile-tab {
width: 100%;
max-width: unset;
position: initial !important;
padding: 0;
}
} }
@media (min-height: 900px) { @media (min-height: 900px) {
.profile-tab.sticky { .profile-tab.sticky {
position: sticky; position: sticky;
} }
} }

View File

@@ -1,71 +0,0 @@
@import '_variables';
.about-account {
max-width: 500px;
width: 100%;
margin: 20px auto 0;
align-self: flex-start;
background: var(--bg_panel);
border-radius: 4px;
padding: 12px 20px 20px;
}
.about-account-header {
display: flex;
flex-direction: column;
align-items: center;
margin-bottom: 16px;
padding-bottom: 14px;
border-bottom: 1px solid var(--border_grey);
}
.about-account-avatar img {
width: 72px;
height: 72px;
border-radius: 50%;
margin-bottom: 4px;
}
.about-account-name {
@include breakable;
font-weight: bold;
}
.about-account-body {
display: flex;
flex-direction: column;
gap: 14px;
}
.about-account-at {
font-size: 18px;
font-weight: bold;
}
.about-account-row {
display: flex;
align-items: center;
gap: 10px;
> span:first-child {
color: var(--fg_faded);
flex-shrink: 0;
}
> div {
display: flex;
flex-direction: column;
}
}
.about-account-label {
color: var(--fg_faded);
font-size: 13px;
}
@media(max-width: 700px) {
.about-account {
max-width: none;
margin: 10px;
}
}

View File

@@ -73,9 +73,9 @@
} }
} }
.profile-joindate, .profile-location, .profile-website { .profile-joindate, .profile-location, profile-website {
color: var(--fg_faded); color: var(--fg_faded);
margin: 1px 0; margin: 2px 0;
width: 100%; width: 100%;
} }
} }
@@ -115,7 +115,7 @@
} }
.profile-card-tabs-name { .profile-card-tabs-name {
flex-shrink: 100; @include breakable;
} }
.profile-card-avatar { .profile-card-avatar {

View File

@@ -1,120 +1,120 @@
@import "_variables"; @import '_variables';
@import "_mixins"; @import '_mixins';
.search-title { .search-title {
font-weight: bold; font-weight: bold;
display: inline-block; display: inline-block;
margin-top: 4px; margin-top: 4px;
} }
.search-field { .search-field {
display: flex;
flex-wrap: wrap;
button {
margin: 0 2px 0 0;
padding: 0px 1px 1px 4px;
height: 23px;
display: flex; display: flex;
align-items: center; flex-wrap: wrap;
}
.pref-input { button {
margin: 0 4px 0 0; margin: 0 2px 0 0;
flex-grow: 1; height: 23px;
height: 23px; }
}
input[type="text"], .pref-input {
input[type="number"] { margin: 0 4px 0 0;
height: calc(100% - 4px); flex-grow: 1;
width: calc(100% - 8px); height: 23px;
} }
> label { input[type="text"] {
display: inline; height: calc(100% - 4px);
background-color: var(--bg_elements); width: calc(100% - 8px);
color: var(--fg_color); }
border: 1px solid var(--accent_border);
padding: 1px 1px 2px 4px;
font-size: 14px;
cursor: pointer;
margin-bottom: 2px;
@include input-colors; > label {
} display: inline;
background-color: var(--bg_elements);
color: var(--fg_color);
border: 1px solid var(--accent_border);
padding: 1px 6px 2px 6px;
font-size: 14px;
cursor: pointer;
margin-bottom: 2px;
@include create-toggle(search-panel, 380px); @include input-colors;
}
@include create-toggle(search-panel, 200px);
} }
.search-panel { .search-panel {
width: 100%; width: 100%;
max-height: 0; max-height: 0;
overflow: hidden; overflow: hidden;
transition: max-height 0.4s; transition: max-height 0.4s;
flex-grow: 1; flex-grow: 1;
font-weight: initial; font-weight: initial;
text-align: left; text-align: left;
.checkbox-container { > div {
display: inline; line-height: 1.7em;
padding-right: unset; }
margin-bottom: 5px;
margin-left: 23px;
}
.checkbox { .checkbox-container {
right: unset; display: inline;
left: -22px; padding-right: unset;
line-height: 1.6em; margin-bottom: unset;
} margin-left: 23px;
}
.checkbox-container .checkbox:after { .checkbox {
top: -4px; right: unset;
} left: -22px;
}
.checkbox-container .checkbox:after {
top: -4px;
}
} }
.search-row { .search-row {
display: flex; display: flex;
flex-wrap: wrap; flex-wrap: wrap;
line-height: unset; line-height: unset;
> div { > div {
flex-grow: 1; flex-grow: 1;
flex-shrink: 1; flex-shrink: 1;
} }
input {
height: 21px;
}
.pref-input {
display: block;
padding-bottom: 5px;
input { input {
height: 21px; height: 21px;
margin-top: 1px; }
.pref-input {
display: block;
padding-bottom: 5px;
input {
height: 21px;
margin-top: 1px;
}
} }
}
} }
.search-toggles { .search-toggles {
flex-grow: 1; flex-grow: 1;
display: grid; display: grid;
grid-template-columns: repeat(5, auto); grid-template-columns: repeat(6, auto);
grid-column-gap: 10px; grid-column-gap: 10px;
} }
.profile-tabs { .profile-tabs {
@include search-resize(820px, 5); @include search-resize(820px, 5);
@include search-resize(715px, 4); @include search-resize(725px, 4);
@include search-resize(700px, 5); @include search-resize(600px, 6);
@include search-resize(485px, 4); @include search-resize(560px, 5);
@include search-resize(410px, 3); @include search-resize(480px, 4);
@include search-resize(410px, 3);
} }
@include search-resize(700px, 5); @include search-resize(560px, 5);
@include search-resize(485px, 4); @include search-resize(480px, 4);
@include search-resize(410px, 3); @include search-resize(410px, 3);

View File

@@ -1,485 +1,162 @@
@import "_variables"; @import '_variables';
.timeline-container { .timeline-container {
@include panel(100%, 600px); @include panel(100%, 600px);
} }
.timeline > div:not(:first-child) { .timeline {
border-top: 1px solid var(--border_grey); background-color: var(--bg_panel);
> div:not(:first-child) {
border-top: 1px solid var(--border_grey);
}
} }
.timeline-header { .timeline-header {
width: 100%; width: 100%;
background-color: var(--bg_panel); background-color: var(--bg_panel);
text-align: center; text-align: center;
padding: 8px; padding: 8px;
display: block; display: block;
font-weight: bold; font-weight: bold;
margin-bottom: 4px; margin-bottom: 5px;
box-sizing: border-box; box-sizing: border-box;
button { button {
float: unset; float: unset;
} }
} }
.timeline-banner img { .timeline-banner img {
width: 100%; width: 100%;
} }
.timeline-description { .timeline-description {
font-weight: normal; font-weight: normal;
} }
.tab { .tab {
align-items: center; align-items: center;
display: flex; display: flex;
flex-wrap: wrap; flex-wrap: wrap;
list-style: none; list-style: none;
margin: 0 0 4px 0; margin: 0 0 5px 0;
background-color: var(--bg_panel); background-color: var(--bg_panel);
padding: 0; padding: 0;
} }
.tab-item { .tab-item {
flex: 1 1 0; flex: 1 1 0;
text-align: center; text-align: center;
margin-top: 0; margin-top: 0;
a { a {
border-bottom: 0.1rem solid transparent; border-bottom: .1rem solid transparent;
color: var(--tab); color: var(--tab);
display: block; display: block;
padding: 8px 0; padding: 8px 0;
text-decoration: none; text-decoration: none;
font-weight: bold; font-weight: bold;
&:hover { &:hover {
text-decoration: none; text-decoration: none;
}
&.active {
border-bottom-color: var(--tab_selected);
color: var(--tab_selected);
}
} }
&.active { &.active a {
border-bottom-color: var(--tab_selected); border-bottom-color: var(--tab_selected);
color: var(--tab_selected); color: var(--tab_selected);
} }
}
&.active a { &.wide {
border-bottom-color: var(--tab_selected); flex-grow: 1.2;
color: var(--tab_selected); flex-basis: 50px;
} }
&.wide {
flex-grow: 1.2;
flex-basis: 50px;
}
} }
.timeline-footer { .timeline-footer {
background-color: var(--bg_panel); background-color: var(--bg_panel);
padding: 6px 0; padding: 6px 0;
} }
.timeline-protected { .timeline-protected {
text-align: center; text-align: center;
p { p {
margin: 8px 0; margin: 8px 0;
} }
h2 { h2 {
color: var(--accent); color: var(--accent);
font-size: 20px; font-size: 20px;
font-weight: 600; font-weight: 600;
} }
} }
.timeline-none { .timeline-none {
color: var(--accent); color: var(--accent);
font-size: 20px; font-size: 20px;
font-weight: 600; font-weight: 600;
text-align: center; text-align: center;
} }
.timeline-end { .timeline-end {
background-color: var(--bg_panel); background-color: var(--bg_panel);
color: var(--accent); color: var(--accent);
font-size: 16px; font-size: 16px;
font-weight: 600; font-weight: 600;
text-align: center; text-align: center;
} }
.show-more { .show-more {
background-color: var(--bg_panel); background-color: var(--bg_panel);
text-align: center; text-align: center;
padding: 0.75em 0; padding: .75em 0;
display: block !important; display: block !important;
a { a {
background-color: var(--darkest_grey); background-color: var(--darkest_grey);
display: inline-block; display: inline-block;
height: 2em; height: 2em;
padding: 0 2em; padding: 0 2em;
line-height: 2em; line-height: 2em;
&:hover { &:hover {
background-color: var(--darker_grey); background-color: var(--darker_grey);
}
} }
}
} }
.top-ref { .top-ref {
background-color: var(--bg_color); background-color: var(--bg_color);
border-top: none !important; border-top: none !important;
.icon-down { .icon-down {
font-size: 20px; font-size: 20px;
display: flex; display: flex;
justify-content: center; justify-content: center;
text-decoration: none; text-decoration: none;
&:hover { &:hover {
color: var(--accent_light); color: var(--accent_light);
}
&::before {
transform: rotate(180deg) translateY(-1px);
}
} }
&::before {
transform: rotate(180deg) translateY(-1px);
}
}
} }
.timeline-item { .timeline-item {
overflow-wrap: break-word; overflow-wrap: break-word;
border-left-width: 0; border-left-width: 0;
min-width: 0; min-width: 0;
padding: 0.75em; padding: .75em;
display: flex; display: flex;
position: relative;
background-color: var(--bg_panel);
}
.timeline.media-grid-view,
.timeline.media-gallery-view {
> div:not(:first-child) {
border-top: none;
}
.timeline-item::before {
display: none;
}
}
.timeline.media-grid-view,
.timeline.media-gallery-view .gallery-masonry.compact {
.tweet-header,
.replying-to,
.retweet-header,
.pinned,
.tweet-stats,
.attribution,
.poll,
.quote,
.community-note,
.media-tag-block,
.tweet-content,
.card-content {
display: none;
}
.card {
margin: unset;
.card-container {
border: unset;
border-radius: unset;
.card-image-container {
width: 100%;
min-height: 100%;
}
.card-content-container {
display: none;
}
}
}
}
.timeline.media-grid-view {
display: grid;
gap: 4px;
grid-template-columns: repeat(3, minmax(0, 1fr));
> div:not(:first-child) {
margin-top: 0;
}
.timeline-item {
padding: 0;
}
.tweet-link {
z-index: 1000;
&:hover {
background-color: unset;
}
}
> .show-more,
> .top-ref,
> .timeline-footer,
> .timeline-header {
grid-column: 1 / -1;
}
.tweet-body {
height: 100%;
margin-left: 0;
padding: 0;
position: relative; position: relative;
aspect-ratio: 1/1;
}
.gallery-row + .gallery-row {
margin-top: 0.25em !important;
}
.attachments {
background-color: var(--darkest_grey);
border-radius: 0;
margin: 0;
max-height: none;
}
.attachments,
.gallery-row,
.still-image {
height: 100%;
width: 100%;
}
.still-image img,
.attachment > video,
.attachment > img {
object-fit: cover;
height: 100%;
width: 100%;
}
.attachment {
display: flex;
align-items: center;
}
.gallery-video {
height: 100%;
}
.media-gif {
display: flex;
}
.timeline-item:hover {
opacity: 0.85;
}
.alt-text {
display: none;
}
}
.timeline.media-gallery-view {
.gallery-masonry {
margin: 10px 0;
column-gap: 10px;
column-width: unquote("clamp(190px, 22vw, 350px)");
&[data-col-size="small"] {
column-width: unquote("max(130px, 11vw)");
}
&[data-col-size="large"] {
column-width: unquote("clamp(350px, 22vw, 480px)");
}
&.masonry-active {
column-width: unset;
column-gap: unset;
position: relative;
.timeline-item {
animation: none;
position: absolute;
box-sizing: border-box;
margin-bottom: 0;
}
}
&.compact {
.tweet-body {
padding: 0;
> .attachments {
margin: 0;
}
}
.card-image-container img {
max-height: unset;
}
}
}
@keyframes masonry-init {
to {
opacity: 1;
pointer-events: auto;
}
}
// Start hidden. CSS animation reveals after a delay as a no-JS fallback.
// With JS, masonry-active cancels the animation and masonry-visible reveals.
.gallery-masonry .timeline-item,
> .show-more,
> .top-ref,
> .timeline-footer {
opacity: 0;
pointer-events: none;
animation: masonry-init 0.2s 0.3s forwards;
}
.gallery-masonry.masonry-active .timeline-item.masonry-visible,
> .show-more.masonry-visible,
> .top-ref.masonry-visible,
> .timeline-footer.masonry-visible {
opacity: 1;
pointer-events: auto;
transition: opacity 0.15s ease;
animation: none;
}
.timeline-item {
margin-bottom: 10px;
break-inside: avoid;
flex-direction: column;
padding: 0;
}
> .show-more,
> .top-ref,
> .timeline-footer,
> .timeline-header {
margin-left: auto;
margin-right: auto;
max-width: 900px;
}
> .show-more {
padding: 0;
margin-top: 8px;
background-color: unset;
}
.tweet-content {
margin: 3px 0;
}
.tweet-body {
display: flex;
flex-direction: column;
height: 100%;
margin-left: 0;
padding: 10px;
> .attachments {
align-self: stretch;
border-radius: 0;
margin: -10px -10px 10px;
max-height: none;
order: -1;
width: auto;
background-color: var(--bg_elements);
.gallery-row {
max-height: none;
max-width: none;
align-items: center;
}
.still-image img,
.attachment > video,
.attachment > img {
max-height: none;
width: 100%;
}
.attachment:last-child {
max-height: none;
}
.card-container {
border: unset;
border-radius: unset;
}
}
.tweet-stat {
padding-top: unset;
}
.quote {
margin-bottom: 5px;
margin-top: 5px;
}
.replying-to {
margin: 0;
}
}
.tweet-header {
align-items: flex-start;
display: flex;
gap: 0.75em;
margin-bottom: 0;
.tweet-avatar {
img {
float: none;
height: 42px;
margin: 0;
width: 42px;
}
}
.tweet-name-row {
flex: 1;
}
.fullname-and-username {
flex-wrap: wrap;
}
.fullname {
max-width: calc(100% - 18px);
}
.verified-icon {
margin-left: 4px;
margin-top: 1px;
}
.username {
display: block;
flex-basis: 100%;
margin-left: 0;
}
}
}
@media (max-width: 520px) {
.timeline.media-gallery-view {
padding: 8px 0;
}
} }

View File

@@ -1,311 +1,231 @@
@import "_variables"; @import '_variables';
@import "_mixins"; @import '_mixins';
@import "thread"; @import 'thread';
@import "media"; @import 'media';
@import "video"; @import 'video';
@import "embed"; @import 'embed';
@import "card"; @import 'card';
@import "poll"; @import 'poll';
@import "quote"; @import 'quote';
.tweet-body { .tweet-body {
flex: 1; flex: 1;
min-width: 0; min-width: 0;
margin-left: 58px; margin-left: 58px;
pointer-events: none; pointer-events: none;
z-index: 1; z-index: 1;
} }
.tweet-content { .tweet-content {
line-height: 1.3em; font-family: $font_3;
pointer-events: all; line-height: 1.3em;
display: inline; pointer-events: all;
display: inline;
} }
.tweet-bidi { .tweet-bidi {
display: block !important; display: block !important;
} }
.tweet-header { .tweet-header {
padding: 0; padding: 0;
vertical-align: bottom; vertical-align: bottom;
flex-basis: 100%; flex-basis: 100%;
margin-bottom: 0.2em; margin-bottom: .2em;
a { a {
display: inline-block; display: inline-block;
word-break: break-all; word-break: break-all;
max-width: 100%; max-width: 100%;
pointer-events: all; pointer-events: all;
} }
} }
.tweet-name-row { .tweet-name-row {
padding: 0; padding: 0;
display: flex; display: flex;
justify-content: space-between; justify-content: space-between;
.verified-icon {
margin-left: 2px;
}
} }
.fullname-and-username { .fullname-and-username {
display: flex; display: flex;
min-width: 0; min-width: 0;
} }
.fullname { .fullname {
@include ellipsis; @include ellipsis;
flex-shrink: 2; flex-shrink: 2;
max-width: 80%; max-width: 80%;
font-size: 14px; font-size: 14px;
font-weight: 700; font-weight: 700;
color: var(--fg_color); color: var(--fg_color);
} }
.username { .username {
@include ellipsis; @include ellipsis;
min-width: 1.6em; min-width: 1.6em;
margin-left: 0.4em; margin-left: .4em;
word-wrap: normal; word-wrap: normal;
} }
.tweet-date { .tweet-date {
display: flex; display: flex;
flex-shrink: 0; flex-shrink: 0;
margin-left: 4px; margin-left: 4px;
} }
.tweet-date a, .tweet-date a, .username, .show-more a {
.username, color: var(--fg_dark);
.show-more a {
color: var(--fg_dark);
} }
.tweet-published { .tweet-published {
margin-top: 6px; margin: 0;
margin-bottom: 0px; margin-top: 5px;
color: var(--grey); color: var(--grey);
pointer-events: all; pointer-events: all;
} }
.tweet-avatar { .tweet-avatar {
display: contents !important; display: contents !important;
img { img {
float: left; float: left;
margin-top: 3px; margin-top: 3px;
margin-left: -58px; margin-left: -58px;
width: 48px; width: 48px;
height: 48px; height: 48px;
} }
} }
.avatar { .avatar {
&.round { position: absolute;
border-radius: 50%;
user-select: none;
-webkit-user-select: none;
}
&.mini { &.round {
position: unset; border-radius: 50%;
margin-right: 5px; }
margin-top: -1px;
width: 20px; &.mini {
height: 20px; position: unset;
} margin-right: 5px;
margin-top: -1px;
width: 20px;
height: 20px;
}
} }
.tweet-embed { .tweet-embed {
display: flex;
flex-direction: column;
justify-content: center;
height: 100%;
background-color: var(--bg_panel);
.tweet-content {
font-size: 18px;
}
.tweet-body {
display: flex; display: flex;
flex-direction: column; flex-direction: column;
max-height: calc(100vh - 0.75em * 2); justify-content: center;
} height: 100%;
background-color: var(--bg_panel);
.card-image img { .tweet-content {
height: auto; font-size: 18px
} }
.avatar { .tweet-body {
position: absolute; display: flex;
} flex-direction: column;
max-height: calc(100vh - 0.75em * 2);
}
} }
.attribution { .attribution {
display: flex; display: flex;
pointer-events: all; pointer-events: all;
margin: 5px 0; margin: 5px 0;
strong { strong {
color: var(--fg_color); color: var(--fg_color);
} }
} }
.media-tag-block { .media-tag-block {
padding-top: 5px; padding-top: 5px;
pointer-events: all; pointer-events: all;
color: var(--fg_faded);
.icon-container {
padding-right: 2px;
}
.media-tag,
.icon-container {
color: var(--fg_faded); color: var(--fg_faded);
}
.icon-container {
padding-right: 2px;
}
.media-tag, .icon-container {
color: var(--fg_faded);
}
} }
.timeline-container .media-tag-block { .timeline-container .media-tag-block {
font-size: 13px; font-size: 13px;
} }
.tweet-geo { .tweet-geo {
color: var(--fg_faded); color: var(--fg_faded);
} }
.replying-to { .replying-to {
color: var(--fg_faded); color: var(--fg_faded);
margin: -2px 0 4px; margin: -2px 0 4px;
a { a {
pointer-events: all; pointer-events: all;
} }
} }
.retweet-header, .retweet-header, .pinned, .tweet-stats {
.pinned, align-content: center;
.tweet-stats { color: var(--grey);
align-content: center; display: flex;
color: var(--grey); flex-shrink: 0;
display: flex; flex-wrap: wrap;
flex-shrink: 0; font-size: 14px;
flex-wrap: wrap; font-weight: 600;
font-size: 14px; line-height: 22px;
font-weight: 600;
line-height: 22px;
span { span {
@include ellipsis; @include ellipsis;
} }
} }
.retweet-header { .retweet-header {
margin-top: -5px !important; margin-top: -5px !important;
} }
.tweet-stats { .tweet-stats {
margin-bottom: -3px; margin-bottom: -3px;
user-select: none;
-webkit-user-select: none;
} }
.tweet-stat { .tweet-stat {
padding-top: 5px; padding-top: 5px;
min-width: 1em; min-width: 1em;
margin-right: 0.8em; margin-right: 0.8em;
} }
.show-thread { .show-thread {
display: block; display: block;
pointer-events: all; pointer-events: all;
padding-top: 2px; padding-top: 2px;
} }
.unavailable-box { .unavailable-box {
width: 100%; width: 100%;
height: 100%; height: 100%;
padding: 12px; padding: 12px;
border: solid 1px var(--dark_grey); border: solid 1px var(--dark_grey);
box-sizing: border-box; box-sizing: border-box;
border-radius: 10px; border-radius: 10px;
background-color: var(--bg_color); background-color: var(--bg_color);
z-index: 2; z-index: 2;
} }
.tweet-link { .tweet-link {
height: 100%; height: 100%;
width: 100%; width: 100%;
left: 0; left: 0;
top: 0; top: 0;
position: absolute; position: absolute;
user-select: none;
-webkit-user-select: none;
&:hover { &:hover {
background-color: var(--bg_hover); background-color: var(--bg_hover);
} }
}
.latest-post-version {
border-bottom: 1px solid var(--dark_grey);
border-top: 1px solid var(--dark_grey);
padding: 01ch 0px;
margin: 1ch 0px;
color: var(--grey);
a {
pointer-events: all;
}
}
.community-note {
background-color: var(--bg_elements);
margin-top: 10px;
border: solid 1px var(--dark_grey);
border-radius: 10px;
overflow: hidden;
pointer-events: all;
&:hover {
background-color: var(--bg_panel);
border-color: var(--grey);
}
}
.community-note-header {
background-color: var(--bg_hover);
font-weight: 700;
padding: 8px 10px;
padding-top: 6px;
display: flex;
align-items: center;
gap: 2px;
.icon-container {
flex-shrink: 0;
color: var(--accent);
}
}
.community-note-text {
white-space: pre-line;
padding: 10px 10px;
padding-top: 6px;
}
.disclosures {
display: flex;
flex-direction: column;
color: var(--grey);
font-size: 14px;
margin-top: 4px;
margin-bottom: -2px;
.icon-attention {
margin-right: -3px;
}
} }

View File

@@ -1,119 +1,118 @@
@import "_variables"; @import '_variables';
@import "_mixins"; @import '_mixins';
.card { .card {
margin: 5px 0; margin: 5px 0;
pointer-events: all; pointer-events: all;
max-height: unset; max-height: unset;
} }
.card-container { .card-container {
border: solid 1px var(--dark_grey); border-radius: 10px;
border-radius: 10px; border-width: 1px;
background-color: var(--bg_elements); border-style: solid;
overflow: hidden; border-color: var(--dark_grey);
color: inherit; background-color: var(--bg_elements);
display: flex; overflow: hidden;
flex-direction: row; color: inherit;
text-decoration: none !important; display: flex;
flex-direction: row;
text-decoration: none !important;
&:hover { &:hover {
border-color: var(--grey); border-color: var(--grey);
} }
.attachments { .attachments {
margin: 0; margin: 0;
border-radius: 0; border-radius: 0;
} }
} }
.card-content { .card-content {
padding: 0.5em; padding: 0.5em;
} }
.card-title { .card-title {
@include ellipsis; @include ellipsis;
white-space: unset; white-space: unset;
font-weight: bold; font-weight: bold;
font-size: 1.1em; font-size: 1.1em;
} }
.card-description { .card-description {
margin: 0.3em 0; margin: 0.3em 0;
white-space: pre-wrap;
} }
.card-destination { .card-destination {
@include ellipsis; @include ellipsis;
color: var(--grey); color: var(--grey);
display: block; display: block;
} }
.card-content-container { .card-content-container {
color: unset; color: unset;
overflow: auto; overflow: auto;
&:hover {
&:hover { text-decoration: none;
text-decoration: none; }
}
} }
.card-image-container { .card-image-container {
width: 98px; width: 98px;
flex-shrink: 0; flex-shrink: 0;
position: relative; position: relative;
overflow: hidden; overflow: hidden;
&:before {
&:before { content: "";
content: ""; display: block;
display: block; padding-top: 100%;
padding-top: 100%; }
}
} }
.card-image { .card-image {
position: absolute; position: absolute;
top: 0; top: 0;
left: 0; left: 0;
bottom: 0; bottom: 0;
right: 0; right: 0;
background-color: var(--bg_overlays); background-color: var(--bg_overlays);
img { img {
width: 100%; width: 100%;
height: 100%; height: 100%;
max-height: 400px; max-height: 400px;
display: block; display: block;
object-fit: cover; object-fit: cover;
} }
} }
.card-overlay { .card-overlay {
@include play-button; @include play-button;
opacity: 0.8; opacity: 0.8;
display: flex; display: flex;
justify-content: center; justify-content: center;
align-items: center; align-items: center;
} }
.large { .large {
.card-container { .card-container {
display: block; display: block;
}
.card-image-container {
width: unset;
&:before {
display: none;
} }
}
.card-image { .card-image-container {
position: unset; width: unset;
border-style: solid;
border-color: var(--dark_grey); &:before {
border-width: 0; display: none;
border-bottom-width: 1px; }
} }
.card-image {
position: unset;
border-style: solid;
border-color: var(--dark_grey);
border-width: 0;
border-bottom-width: 1px;
}
} }

View File

@@ -1,17 +1,17 @@
@import "_variables"; @import '_variables';
@import "_mixins"; @import '_mixins';
.embed-video { .embed-video {
.gallery-video { .gallery-video {
width: 100%; width: 100%;
height: 100%; height: 100%;
position: absolute; position: absolute;
background-color: black; background-color: black;
top: 0%; top: 0%;
left: 0%; left: 0%;
} }
.gallery-video > .attachment { .video-container {
max-height: unset; max-height: unset;
} }
} }

View File

@@ -1,165 +1,119 @@
@import "_variables"; @import '_variables';
.gallery-row { .gallery-row {
display: flex; display: flex;
flex-direction: row; flex-direction: row;
flex-wrap: nowrap; flex-wrap: nowrap;
overflow: hidden; align-items: center;
flex-grow: 1; overflow: hidden;
max-height: 379.5px; flex-grow: 1;
max-width: 533px; max-height: 379.5px;
pointer-events: all; max-width: 533px;
pointer-events: all;
&.mixed-row {
.attachment {
min-width: 0;
min-height: 0;
flex: 1 1 0;
max-height: 379.5px;
display: flex;
align-items: center;
justify-content: center;
background-color: #101010;
}
.still-image,
.still-image img,
.attachment > video,
.attachment > img {
width: 100%;
height: 100%;
max-width: none;
max-height: none;
}
.still-image { .still-image {
display: flex; width: 100%;
align-self: stretch; display: flex;
} }
.still-image img {
flex-basis: auto;
flex-grow: 0;
object-fit: cover;
}
.attachment > video,
.attachment > img {
object-fit: cover;
}
.attachment > video {
object-fit: contain;
}
}
} }
.attachments { .attachments {
margin-top: 0.35em; margin-top: .35em;
display: flex; display: flex;
flex-direction: row; flex-direction: row;
width: 100%; width: 100%;
max-height: 600px; max-height: 600px;
border-radius: 7px; border-radius: 7px;
overflow: hidden; overflow: hidden;
flex-flow: column; flex-flow: column;
background-color: var(--bg_color); background-color: var(--bg_color);
align-items: center; align-items: center;
pointer-events: all; pointer-events: all;
.image-attachment {
width: 100%;
}
} }
.attachment { .attachment {
position: relative; position: relative;
line-height: 0; line-height: 0;
overflow: hidden; overflow: hidden;
margin: 0 0.25em 0 0; margin: 0 .25em 0 0;
flex-grow: 1; flex-grow: 1;
box-sizing: border-box; box-sizing: border-box;
min-width: 2em; min-width: 2em;
&:last-child { &:last-child {
margin: 0; margin: 0;
max-height: 530px;
}
}
.gallery-gif video {
max-height: 530px; max-height: 530px;
} background-color: #101010;
}
.media-gif {
display: table;
background-color: unset;
width: unset;
max-height: unset;
}
.media-gif video {
max-height: 530px;
background-color: #101010;
} }
.still-image { .still-image {
max-height: 379.5px;
max-width: 533px;
img {
object-fit: cover;
max-width: 100%;
max-height: 379.5px; max-height: 379.5px;
flex-basis: 300px; max-width: 533px;
flex-grow: 1; justify-content: center;
}
img {
object-fit: cover;
max-width: 100%;
max-height: 379.5px;
flex-basis: 300px;
flex-grow: 1;
}
} }
.alt-text { .image {
margin: 0px; display: inline-block;
padding: 11px 7px;
box-sizing: border-box;
position: absolute;
bottom: 10px;
left: 10px;
width: 2.98em;
max-height: 25px;
white-space: pre;
overflow: hidden;
border-radius: 10px;
color: var(--fg_color);
font-size: 12px;
font-weight: bold;
background: rgba(0, 0, 0, 0.5);
backdrop-filter: blur(12px);
} }
.alt-text:hover { // .single-image {
padding: 7px; // display: inline-block;
width: Min(230px, calc(100% - 10px * 2)); // width: 100%;
max-height: calc(100% - 10px); // max-height: 600px;
line-height: 1.2em;
white-space: pre-wrap; // .attachments {
transition-duration: 0.4s; // width: unset;
transition-property: max-height; // max-height: unset;
} // display: inherit;
// }
// }
.overlay-circle { .overlay-circle {
border-radius: 50%; border-radius: 50%;
background-color: var(--dark_grey); background-color: var(--dark_grey);
width: 40px; width: 40px;
height: 40px; height: 40px;
align-items: center; align-items: center;
display: flex; display: flex;
border-width: 5px; border-width: 5px;
border-color: var(--play_button); border-color: var(--play_button);
border-style: solid; border-style: solid;
} }
.overlay-triangle { .overlay-triangle {
width: 0; width: 0;
height: 0; height: 0;
border-style: solid; border-style: solid;
border-width: 12px 0 12px 17px; border-width: 12px 0 12px 17px;
border-color: transparent transparent transparent var(--play_button); border-color: transparent transparent transparent var(--play_button);
margin-left: 14px; margin-left: 14px;
}
.media-gif {
display: table;
background-color: unset;
width: unset;
} }
.media-body { .media-body {
flex: 1; flex: 1;
padding: 0; padding: 0;
white-space: pre-wrap; white-space: pre-wrap;
} }

View File

@@ -1,42 +1,42 @@
@import "_variables"; @import '_variables';
.poll-meter { .poll-meter {
overflow: hidden; overflow: hidden;
position: relative; position: relative;
margin: 6px 0; margin: 6px 0;
height: 26px; height: 26px;
background: var(--bg_color); background: var(--bg_color);
border-radius: 5px; border-radius: 5px;
display: flex; display: flex;
align-items: center; align-items: center;
} }
.poll-choice-bar { .poll-choice-bar {
height: 100%; height: 100%;
position: absolute; position: absolute;
background: var(--dark_grey); background: var(--dark_grey);
} }
.poll-choice-value { .poll-choice-value {
position: relative; position: relative;
font-weight: bold; font-weight: bold;
margin-left: 5px; margin-left: 5px;
margin-right: 6px; margin-right: 6px;
min-width: 30px; min-width: 30px;
text-align: right; text-align: right;
pointer-events: all; pointer-events: all;
} }
.poll-choice-option { .poll-choice-option {
position: relative; position: relative;
pointer-events: all; pointer-events: all;
} }
.poll-info { .poll-info {
color: var(--grey); color: var(--grey);
pointer-events: all; pointer-events: all;
} }
.leader .poll-choice-bar { .leader .poll-choice-bar {
background: var(--accent_dark); background: var(--accent_dark);
} }

View File

@@ -1,120 +1,94 @@
@import "_variables"; @import '_variables';
.quote { .quote {
margin-top: 10px; margin-top: 10px;
border: solid 1px var(--dark_grey); border: solid 1px var(--dark_grey);
border-radius: 10px; border-radius: 10px;
background-color: var(--bg_elements); background-color: var(--bg_elements);
overflow: hidden;
pointer-events: all;
position: relative;
width: 100%;
&:hover {
border-color: var(--grey);
}
&.unavailable:hover {
border-color: var(--dark_grey);
}
.tweet-name-row {
padding: 8px 10px 6px 10px;
}
.quote-text {
overflow: hidden; overflow: hidden;
white-space: pre-wrap; pointer-events: all;
word-wrap: break-word; position: relative;
padding: 10px; width: 100%;
padding-top: 0;
}
.show-thread {
padding: 0px 10px 6px 10px;
margin-top: -6px;
}
.quote-latest {
padding: 0px 10px 6px 10px;
color: var(--grey);
}
.replying-to {
padding: 0px 10px;
padding-bottom: 4px;
margin: unset;
}
.community-note {
background-color: var(--bg_panel);
border: unset;
border-top: solid 1px var(--dark_grey);
border-radius: unset;
margin-top: 0;
&:hover { &:hover {
border-top-color: var(--grey); border-color: var(--grey);
} }
.community-note-header { &.unavailable:hover {
background-color: var(--bg_panel); border-color: var(--dark_grey);
padding-bottom: 0; }
.tweet-name-row {
padding: 6px 8px;
margin-top: 1px;
}
.quote-text {
overflow: hidden;
white-space: pre-wrap;
word-wrap: break-word;
padding: 0px 8px 8px 8px;
}
.show-thread {
padding: 0px 8px 6px 8px;
margin-top: -6px;
}
.replying-to {
padding: 0px 8px;
margin: unset;
} }
}
} }
.unavailable-quote { .unavailable-quote {
padding: 12px; padding: 12px;
display: block;
} }
.quote-link { .quote-link {
width: 100%; width: 100%;
height: 100%; height: 100%;
left: 0; left: 0;
top: 0; top: 0;
position: absolute; position: absolute;
} }
.quote-media-container { .quote-media-container {
max-height: 300px;
display: flex;
.card {
margin: unset;
}
.attachments {
border-radius: 0;
}
.media-gif {
width: 100%;
display: flex;
justify-content: center;
}
.media-gif > .attachment {
display: flex;
justify-content: center;
background-color: var(--bg_color);
video {
height: unset;
width: unset;
max-height: 100%;
max-width: 100%;
}
}
.gallery-row .attachment,
.gallery-row .attachment > video,
.gallery-row .attachment > img {
max-height: 300px; max-height: 300px;
} display: flex;
.still-image img { .card {
max-height: 250px; margin: unset;
} }
.attachments {
border-radius: 0;
}
.media-gif {
width: 100%;
display: flex;
justify-content: center;
}
.gallery-gif .attachment {
display: flex;
justify-content: center;
background-color: var(--bg_color);
video {
height: unset;
width: unset;
max-height: 100%;
max-width: 100%;
}
}
.gallery-video, .gallery-gif {
max-height: 300px;
}
.still-image img {
max-height: 250px
}
} }

View File

@@ -1,154 +1,113 @@
@import "_variables"; @import '_variables';
@import "_mixins"; @import '_mixins';
.conversation, .conversation {
.edit-history { @include panel(100%, 600px);
@include panel(100%, 600px);
.show-more { .show-more {
margin-bottom: 10px; margin-bottom: 10px;
} }
} }
.main-thread, .main-thread {
.latest-edit { margin-bottom: 20px;
margin-bottom: 20px; background-color: var(--bg_panel);
} }
.reply { .main-tweet, .replies {
margin-bottom: 10px;
}
.main-tweet,
.replies,
.edit-history > div {
body.fixed-nav & {
padding-top: 50px; padding-top: 50px;
margin-top: -50px; margin-top: -50px;
}
}
.edit-history-header {
padding: 10px;
margin-bottom: 5px;
font-size: 16px;
font-weight: bold;
background-color: var(--bg_panel);
}
.tweet-edit {
margin-bottom: 5px;
} }
.main-tweet .tweet-content { .main-tweet .tweet-content {
font-size: 18px; font-size: 18px;
} }
@media (max-width: 600px) {
.main-tweet .tweet-content { @media(max-width: 600px) {
font-size: 16px; .main-tweet .tweet-content {
} font-size: 16px;
}
}
.reply {
background-color: var(--bg_panel);
margin-bottom: 10px;
} }
.thread-line { .thread-line {
.timeline-item::before, .timeline-item::before,
&.timeline-item::before { &.timeline-item::before {
background: var(--accent_dark); background: var(--accent_dark);
content: ""; content: '';
position: relative; position: relative;
min-width: 3px; min-width: 3px;
width: 3px; width: 3px;
left: 26px; left: 26px;
border-radius: 2px; border-radius: 2px;
margin-left: -3px; margin-left: -3px;
margin-bottom: 37px; margin-bottom: 37px;
top: 56px; top: 56px;
z-index: 1; z-index: 1;
pointer-events: none; pointer-events: none;
} }
.with-header:not(:first-child)::after { .with-header:not(:first-child)::after {
background: var(--accent_dark); background: var(--accent_dark);
content: ""; content: '';
position: relative; position: relative;
float: left; float: left;
min-width: 3px; min-width: 3px;
width: 3px; width: 3px;
right: calc(100% - 26px); right: calc(100% - 26px);
border-radius: 2px; border-radius: 2px;
margin-left: -3px; margin-left: -3px;
margin-bottom: 37px; margin-bottom: 37px;
bottom: 10px; bottom: 10px;
height: 30px; height: 30px;
z-index: 1; z-index: 1;
pointer-events: none; pointer-events: none;
} }
.unavailable::before { .unavailable::before {
top: 48px; top: 48px;
margin-bottom: 28px; margin-bottom: 28px;
} }
.more-replies::before { .more-replies::before {
content: "..."; content: '...';
background: unset; background: unset;
color: var(--more_replies_dots); color: var(--more_replies_dots);
font-weight: bold; font-weight: bold;
font-size: 20px; font-size: 20px;
line-height: 0.25em; line-height: 0.25em;
left: 1.2em; left: 1.2em;
width: 5px; width: 5px;
top: 2px; top: 2px;
margin-bottom: 0; margin-bottom: 0;
margin-left: -2.5px; margin-left: -2.5px;
} }
.earlier-replies { .earlier-replies {
padding-bottom: 0; padding-bottom: 0;
margin-bottom: -5px; margin-bottom: -5px;
} }
} }
.timeline-item.thread-last::before { .timeline-item.thread-last::before {
background: unset; background: unset;
min-width: unset; min-width: unset;
width: 0; width: 0;
margin: 0; margin: 0;
} }
.more-replies { .more-replies {
padding-top: 0.3em !important; padding-top: 0.3em !important;
} }
.more-replies-text { .more-replies-text {
@include ellipsis; @include ellipsis;
display: block; display: block;
margin-left: 58px; margin-left: 58px;
padding: 7px 0; padding: 7px 0;
}
.timeline-item.thread.more-replies-thread {
padding: 0 0.75em;
&::before {
top: 40px;
margin-bottom: 31px;
}
.more-replies {
display: flex;
padding-top: unset !important;
margin-top: 8px;
&::before {
display: inline-block;
position: relative;
top: -1px;
line-height: 0.4em;
}
.more-replies-text {
display: inline;
}
}
} }

View File

@@ -1,77 +1,66 @@
@import "_variables"; @import '_variables';
@import "_mixins"; @import '_mixins';
video { video {
height: 100%; max-height: 100%;
width: 100%; max-width: 100%;
} }
.gallery-video { .gallery-video {
display: flex; display: flex;
overflow: hidden; overflow: hidden;
}
&.card-container {
flex-direction: column;
width: 100%;
}
> .attachment { .gallery-video.card-container {
min-height: 80px; flex-direction: column;
min-width: 200px; }
.video-container {
max-height: 530px; max-height: 530px;
margin: 0; margin: 0;
display: flex;
align-items: center;
justify-content: center;
img { img {
max-height: 100%; max-height: 100%;
max-width: 100%; max-width: 100%;
} }
}
} }
.video-overlay { .video-overlay {
@include play-button; @include play-button;
background-color: $shadow; background-color: $shadow;
p { p {
position: relative; position: relative;
z-index: 0; z-index: 0;
text-align: center; text-align: center;
top: calc(50% - 20px); top: calc(50% - 20px);
font-size: 20px; font-size: 20px;
line-height: 1.3; line-height: 1.3;
margin: 0 20px; margin: 0 20px;
} }
.overlay-circle { div {
position: relative; position: relative;
z-index: 0; z-index: 0;
top: calc(50% - 20px); top: calc(50% - 20px);
margin: 0 auto; margin: 0 auto;
width: 40px; width: 40px;
height: 40px; height: 40px;
} }
.overlay-duration { form {
position: absolute; width: 100%;
bottom: 8px; height: 100%;
left: 8px; align-items: center;
background-color: #0000007a; justify-content: center;
line-height: 1em; display: flex;
padding: 4px 6px 4px 6px; }
border-radius: 5px;
font-weight: bold;
}
form { button {
width: 100%; padding: 5px 8px;
height: 100%; font-size: 16px;
align-items: center; }
justify-content: center;
display: flex;
}
button {
padding: 5px 8px;
font-size: 16px;
}
} }

View File

@@ -1,62 +0,0 @@
import std/[asyncdispatch, base64, httpclient, random, strutils, sequtils, times]
import nimcrypto
import experimental/parser/tid
randomize()
const defaultKeyword = "obfiowerehiring";
const pairsUrl =
"https://raw.githubusercontent.com/fa0311/x-client-transaction-id-pair-dict/refs/heads/main/pair.json";
var
cachedPairs: seq[TidPair] = @[]
lastCached = 0
# refresh every hour
ttlSec = 60 * 60
proc getPair(): Future[TidPair] {.async.} =
if cachedPairs.len == 0 or int(epochTime()) - lastCached > ttlSec:
lastCached = int(epochTime())
let client = newAsyncHttpClient()
defer: client.close()
let resp = await client.get(pairsUrl)
if resp.status == $Http200:
cachedPairs = parseTidPairs(await resp.body)
return sample(cachedPairs)
proc encodeSha256(text: string): array[32, byte] =
let
data = cast[ptr byte](addr text[0])
dataLen = uint(len(text))
digest = sha256.digest(data, dataLen)
return digest.data
proc encodeBase64[T](data: T): string =
return encode(data).replace("=", "")
proc decodeBase64(data: string): seq[byte] =
return cast[seq[byte]](decode(data))
proc genTid*(path: string): Future[string] {.async.} =
let
pair = await getPair()
timeNow = int(epochTime() - 1682924400)
timeNowBytes = @[
byte(timeNow and 0xff),
byte((timeNow shr 8) and 0xff),
byte((timeNow shr 16) and 0xff),
byte((timeNow shr 24) and 0xff)
]
data = "GET!" & path & "!" & $timeNow & defaultKeyword & pair.animationKey
hashBytes = encodeSha256(data)
keyBytes = decodeBase64(pair.verification)
bytesArr = keyBytes & timeNowBytes & hashBytes[0 ..< 16] & @[3'u8]
randomNum = byte(rand(256))
tid = @[randomNum] & bytesArr.mapIt(it xor randomNum)
return encodeBase64(tid)

154
src/tokens.nim Normal file
View File

@@ -0,0 +1,154 @@
# SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, httpclient, times, sequtils, json, random
import strutils, tables
import zippy
import types, consts, http_pool
const
maxConcurrentReqs = 5 # max requests at a time per token, to avoid race conditions
maxLastUse = 1.hours # if a token is unused for 60 minutes, it expires
maxAge = 2.hours + 55.minutes # tokens expire after 3 hours
failDelay = initDuration(minutes=30)
var
clientPool: HttpPool
tokenPool: seq[Token]
lastFailed: Time
proc getPoolJson*(): JsonNode =
var
list = newJObject()
totalReqs = 0
totalPending = 0
reqsPerApi: Table[string, int]
for token in tokenPool:
totalPending.inc(token.pending)
list[token.tok] = %*{
"apis": newJObject(),
"pending": token.pending,
"init": $token.init,
"lastUse": $token.lastUse
}
for api in token.apis.keys:
list[token.tok]["apis"][$api] = %token.apis[api]
let
maxReqs =
case api
of Api.listMembers, Api.listBySlug, Api.list, Api.userRestId: 500
of Api.timeline: 187
else: 180
reqs = maxReqs - token.apis[api].remaining
reqsPerApi[$api] = reqsPerApi.getOrDefault($api, 0) + reqs
totalReqs.inc(reqs)
return %*{
"amount": tokenPool.len,
"requests": totalReqs,
"pending": totalPending,
"apis": reqsPerApi,
"tokens": list
}
proc rateLimitError*(): ref RateLimitError =
newException(RateLimitError, "rate limited")
proc fetchToken(): Future[Token] {.async.} =
if getTime() - lastFailed < failDelay:
raise rateLimitError()
let headers = newHttpHeaders({
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"accept-encoding": "gzip",
"accept-language": "en-US,en;q=0.5",
"connection": "keep-alive",
"authorization": auth
})
try:
let
resp = clientPool.use(headers): await c.postContent(activate)
tokNode = parseJson(uncompress(resp))["guest_token"]
tok = tokNode.getStr($(tokNode.getInt))
time = getTime()
return Token(tok: tok, init: time, lastUse: time)
except Exception as e:
lastFailed = getTime()
echo "fetching token failed: ", e.msg
proc expired(token: Token): bool =
let time = getTime()
token.init < time - maxAge or token.lastUse < time - maxLastUse
proc isLimited(token: Token; api: Api): bool =
if token.isNil or token.expired:
return true
if api in token.apis:
let limit = token.apis[api]
return (limit.remaining <= 10 and limit.reset > epochTime().int)
else:
return false
proc isReady(token: Token; api: Api): bool =
not (token.isNil or token.pending > maxConcurrentReqs or token.isLimited(api))
proc release*(token: Token; used=false; invalid=false) =
if token.isNil: return
if invalid or token.expired:
let idx = tokenPool.find(token)
if idx > -1: tokenPool.delete(idx)
elif used:
dec token.pending
token.lastUse = getTime()
proc getToken*(api: Api): Future[Token] {.async.} =
for i in 0 ..< tokenPool.len:
if result.isReady(api): break
release(result)
result = tokenPool.sample()
if not result.isReady(api):
release(result)
result = await fetchToken()
tokenPool.add result
if not result.isNil:
inc result.pending
else:
raise rateLimitError()
proc setRateLimit*(token: Token; api: Api; remaining, reset: int) =
# avoid undefined behavior in race conditions
if api in token.apis:
let limit = token.apis[api]
if limit.reset >= reset and limit.remaining < remaining:
return
token.apis[api] = RateLimit(remaining: remaining, reset: reset)
proc poolTokens*(amount: int) {.async.} =
var futs: seq[Future[Token]]
for i in 0 ..< amount:
futs.add fetchToken()
for token in futs:
var newToken: Token
try: newToken = await token
except: discard
if not newToken.isNil:
tokenPool.add newToken
proc initTokenPool*(cfg: Config) {.async.} =
clientPool = HttpPool()
while true:
if tokenPool.countIt(not it.isLimited(Api.timeline)) < cfg.minTokens:
await poolTokens(min(4, cfg.minTokens - tokenPool.len))
await sleepAsync(2000)

View File

@@ -6,75 +6,45 @@ genPrefsType()
type type
RateLimitError* = object of CatchableError RateLimitError* = object of CatchableError
NoSessionsError* = object of CatchableError
InternalError* = object of CatchableError InternalError* = object of CatchableError
BadClientError* = object of CatchableError
TimelineKind* {.pure.} = enum Api* {.pure.} = enum
tweets, replies, media userShow
timeline
ApiUrl* = object search
endpoint*: string tweet
params*: seq[(string, string)] list
listBySlug
ApiReq* = object listMembers
oauth*: ApiUrl userRestId
cookie*: ApiUrl status
RateLimit* = object RateLimit* = object
limit*: int
remaining*: int remaining*: int
reset*: int reset*: int
SessionKind* = enum Token* = ref object
oauth tok*: string
cookie init*: Time
lastUse*: Time
Session* = ref object
id*: int64
username*: string
pending*: int pending*: int
limited*: bool apis*: Table[Api, RateLimit]
limitedAt*: int
apis*: Table[string, RateLimit]
case kind*: SessionKind
of oauth:
oauthToken*: string
oauthSecret*: string
of cookie:
authToken*: string
ct0*: string
Error* = enum Error* = enum
null = 0 null = 0
noUserMatches = 17 noUserMatches = 17
protectedUser = 22 protectedUser = 22
missingParams = 25
timeout = 29
couldntAuth = 32 couldntAuth = 32
doesntExist = 34 doesntExist = 34
unauthorized = 37
invalidParam = 47
userNotFound = 50 userNotFound = 50
suspended = 63 suspended = 63
rateLimited = 88 rateLimited = 88
expiredToken = 89 invalidToken = 89
listIdOrSlug = 112 listIdOrSlug = 112
tweetNotFound = 144 tweetNotFound = 144
tweetNotAuthorized = 179
forbidden = 200 forbidden = 200
badRequest = 214
badToken = 239 badToken = 239
locked = 326
noCsrf = 353 noCsrf = 353
tweetUnavailable = 421
tweetCensored = 422
VerifiedType* = enum
none = "None"
blue = "Blue"
business = "Business"
government = "Government"
User* = object User* = object
id*: string id*: string
@@ -91,42 +61,11 @@ type
tweets*: int tweets*: int
likes*: int likes*: int
media*: int media*: int
verifiedType*: VerifiedType verified*: bool
protected*: bool protected*: bool
suspended*: bool suspended*: bool
joinDate*: DateTime joinDate*: DateTime
AccountInfo* = object
username*: string
fullname*: string
userPic*: string
joinDate*: DateTime
verifiedType*: VerifiedType
suspended*: bool
basedIn*: string
source*: string
usernameChanges*: int
lastUsernameChange*: DateTime
affiliateUsername*: string
affiliateLabel*: string
isIdentityVerified*: bool
verifiedSince*: DateTime
overrideVerifiedYear*: int
Broadcast* = object
id*: string
title*: string
state*: string
thumb*: string
mediaKey*: string
m3u8Url*: string
totalWatched*: int
startTime*: DateTime
endTime*: DateTime
replayStart*: int
availableForReplay*: bool
user*: User
VideoType* = enum VideoType* = enum
m3u8 = "application/x-mpegURL" m3u8 = "application/x-mpegURL"
mp4 = "video/mp4" mp4 = "video/mp4"
@@ -136,12 +75,12 @@ type
contentType*: VideoType contentType*: VideoType
url*: string url*: string
bitrate*: int bitrate*: int
resolution*: int
Video* = object Video* = object
durationMs*: int durationMs*: int
url*: string url*: string
thumb*: string thumb*: string
views*: string
available*: bool available*: bool
reason*: string reason*: string
title*: string title*: string
@@ -154,7 +93,6 @@ type
Query* = object Query* = object
kind*: QueryKind kind*: QueryKind
view*: string
text*: string text*: string
filters*: seq[string] filters*: seq[string]
includes*: seq[string] includes*: seq[string]
@@ -162,33 +100,12 @@ type
fromUser*: seq[string] fromUser*: seq[string]
since*: string since*: string
until*: string until*: string
minLikes*: string near*: string
sep*: string sep*: string
Gif* = object Gif* = object
url*: string url*: string
thumb*: string thumb*: string
altText*: string
Photo* = object
url*: string
altText*: string
MediaKind* = enum
photoMedia
videoMedia
gifMedia
Media* = object
case kind*: MediaKind
of photoMedia:
photo*: Photo
of videoMedia:
video*: Video
of gifMedia:
gif*: Gif
MediaEntities* = seq[Media]
GalleryPhoto* = object GalleryPhoto* = object
url*: string url*: string
@@ -227,10 +144,8 @@ type
imageDirectMessage = "image_direct_message" imageDirectMessage = "image_direct_message"
audiospace = "audiospace" audiospace = "audiospace"
newsletterPublication = "newsletter_publication" newsletterPublication = "newsletter_publication"
jobDetails = "job_details"
hidden
unknown unknown
Card* = object Card* = object
kind*: CardKind kind*: CardKind
url*: string url*: string
@@ -244,7 +159,7 @@ type
replies*: int replies*: int
retweets*: int retweets*: int
likes*: int likes*: int
views*: int quotes*: int
Tweet* = ref object Tweet* = ref object
id*: int64 id*: int64
@@ -259,8 +174,6 @@ type
available*: bool available*: bool
tombstone*: string tombstone*: string
location*: string location*: string
# Unused, needed for backwards compat
source*: string
stats*: TweetStats stats*: TweetStats
retweet*: Option[Tweet] retweet*: Option[Tweet]
attribution*: Option[User] attribution*: Option[User]
@@ -268,13 +181,9 @@ type
quote*: Option[Tweet] quote*: Option[Tweet]
card*: Option[Card] card*: Option[Card]
poll*: Option[Poll] poll*: Option[Poll]
media*: MediaEntities gif*: Option[Gif]
history*: seq[int64] video*: Option[Video]
note*: string photos*: seq[string]
isAd*: bool
isAI*: bool
Tweets* = seq[Tweet]
Result*[T] = object Result*[T] = object
content*: seq[T] content*: seq[T]
@@ -283,7 +192,7 @@ type
query*: Query query*: Query
Chain* = object Chain* = object
content*: Tweets content*: seq[Tweet]
hasMore*: bool hasMore*: bool
cursor*: string cursor*: string
@@ -293,18 +202,13 @@ type
after*: Chain after*: Chain
replies*: Result[Chain] replies*: Result[Chain]
EditHistory* = object Timeline* = Result[Tweet]
latest*: Tweet
history*: Tweets
Timeline* = Result[Tweets]
Profile* = object Profile* = object
user*: User user*: User
photoRail*: PhotoRail photoRail*: PhotoRail
pinned*: Option[Tweet] pinned*: Option[Tweet]
tweets*: Timeline tweets*: Timeline
accountInfo*: AccountInfo
List* = object List* = object
id*: string id*: string
@@ -331,19 +235,10 @@ type
hmacKey*: string hmacKey*: string
base64Media*: bool base64Media*: bool
minTokens*: int minTokens*: int
enableRSSUserTweets*: bool enableRss*: bool
enableRSSUserReplies*: bool
enableRSSUserMedia*: bool
enableRSSSearch*: bool
enableRSSList*: bool
enableDebug*: bool enableDebug*: bool
proxy*: string proxy*: string
proxyAuth*: string proxyAuth*: string
apiProxy*: string
disableTid*: bool
maxConcurrentReqs*: int
maxRetries*: int
retryDelayMs*: int
rssCacheTime*: int rssCacheTime*: int
listCacheTime*: int listCacheTime*: int
@@ -359,27 +254,3 @@ type
proc contains*(thread: Chain; tweet: Tweet): bool = proc contains*(thread: Chain; tweet: Tweet): bool =
thread.content.anyIt(it.id == tweet.id) thread.content.anyIt(it.id == tweet.id)
proc add*(timeline: var seq[Tweets]; tweet: Tweet) =
timeline.add @[tweet]
proc getPhotos*(tweet: Tweet): seq[Photo] =
tweet.media.filterIt(it.kind == photoMedia).mapIt(it.photo)
proc getVideos*(tweet: Tweet): seq[Video] =
tweet.media.filterIt(it.kind == videoMedia).mapIt(it.video)
proc hasPhotos*(tweet: Tweet): bool =
tweet.media.anyIt(it.kind == photoMedia)
proc hasVideos*(tweet: Tweet): bool =
tweet.media.anyIt(it.kind == videoMedia)
proc hasGifs*(tweet: Tweet): bool =
tweet.media.anyIt(it.kind == gifMedia)
proc getThumb*(media: Media): string =
case media.kind
of photoMedia: media.photo.url
of videoMedia: media.video.thumb
of gifMedia: media.gif.thumb

View File

@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import sequtils, strutils, strformat, uri, tables, base64 import strutils, strformat, uri, tables, base64
import nimcrypto import nimcrypto
var var
@@ -9,17 +9,14 @@ var
const const
https* = "https://" https* = "https://"
twimg* = "pbs.twimg.com/" twimg* = "pbs.twimg.com/"
nitterParams* = ["name", "tab", "id", "list", "referer", "scroll", "prefs"] nitterParams = ["name", "tab", "id", "list", "referer", "scroll"]
twitterDomains = @[ twitterDomains = @[
"twitter.com", "twitter.com",
"pic.twitter.com", "pic.twitter.com",
"twimg.com", "twimg.com",
"abs.twimg.com", "abs.twimg.com",
"pbs.twimg.com", "pbs.twimg.com",
"video.twimg.com", "video.twimg.com"
"x.com",
"pscp.tv",
"video.pscp.tv"
] ]
proc setHmacKey*(key: string) = proc setHmacKey*(key: string) =
@@ -45,25 +42,13 @@ proc getPicUrl*(link: string): string =
else: else:
&"/pic/{encodeUrl(link)}" &"/pic/{encodeUrl(link)}"
proc getOrigPicUrl*(link: string): string =
if base64Media:
&"/pic/orig/enc/{encode(link, safe=true)}"
else:
&"/pic/orig/{encodeUrl(link)}"
proc filterParams*(params: Table): seq[(string, string)] = proc filterParams*(params: Table): seq[(string, string)] =
for p in params.pairs(): for p in params.pairs():
if p[1].len > 0 and p[0] notin nitterParams: if p[1].len > 0 and p[0] notin nitterParams:
result.add p result.add p
proc isTwitterUrl*(uri: Uri): bool = proc isTwitterUrl*(uri: Uri): bool =
uri.hostname in twitterDomains or uri.hostname in twitterDomains
uri.hostname.endsWith(".video.pscp.tv")
proc isTwitterUrl*(url: string): bool = proc isTwitterUrl*(url: string): bool =
isTwitterUrl(parseUri(url)) parseUri(url).hostname in twitterDomains
proc validateNumber*(value: string): string =
if value.anyIt(not it.isDigit):
return ""
return value

View File

@@ -1,93 +0,0 @@
# SPDX-License-Identifier: AGPL-3.0-only
import strutils, strformat, times
import karax/[karaxdsl, vdom]
import renderutils
import ".."/[types, formatters]
proc renderAboutAccount*(info: AccountInfo): VNode =
let user = User(
username: info.username,
fullname: info.fullname,
userPic: info.userPic,
verifiedType: info.verifiedType
)
buildHtml(tdiv(class="about-account")):
tdiv(class="about-account-header"):
a(class="about-account-avatar", href=(&"/{info.username}")):
genImg(getUserPic(info.userPic, "_200x200"))
tdiv(class="about-account-name"):
linkUser(user, class="profile-card-fullname")
verifiedIcon(user)
linkUser(user, class="profile-card-username")
tdiv(class="about-account-body"):
tdiv(class="about-account-row"):
span: icon "calendar"
tdiv:
span(class="about-account-label"): text "Date joined"
span(class="about-account-value"):
text info.joinDate.format("MMMM YYYY")
if info.basedIn.len > 0:
tdiv(class="about-account-row"):
span: icon "location"
tdiv:
span(class="about-account-label"): text "Account based in"
span(class="about-account-value"): text info.basedIn
if info.verifiedType != VerifiedType.none:
if info.overrideVerifiedYear != 0:
tdiv(class="about-account-row"):
span: icon "ok"
tdiv:
span(class="about-account-label"): text "Verified"
span(class="about-account-value"):
let year = abs(info.overrideVerifiedYear)
let era = if info.overrideVerifiedYear < 0: " BCE" else: ""
text "Since " & $year & era
elif info.verifiedSince.year > 0:
tdiv(class="about-account-row"):
span: icon "ok"
tdiv:
span(class="about-account-label"): text "Verified"
span(class="about-account-value"):
text "Since " & info.verifiedSince.format("MMMM YYYY")
if info.isIdentityVerified:
tdiv(class="about-account-row"):
span: icon "ok"
tdiv:
span(class="about-account-label"): text "ID Verified"
span(class="about-account-value"): text "Yes"
if info.affiliateUsername.len > 0:
tdiv(class="about-account-row"):
span: icon "group"
tdiv:
span(class="about-account-label"): text "An affiliate of"
span(class="about-account-value"):
a(href=(&"/{info.affiliateUsername}")):
if info.affiliateLabel.len > 0:
text info.affiliateLabel & " (@" & info.affiliateUsername & ")"
else:
text "@" & info.affiliateUsername
if info.usernameChanges > 0:
tdiv(class="about-account-row"):
span(class="about-account-at"): text "@"
tdiv:
span(class="about-account-label"):
text $info.usernameChanges & " username change"
if info.usernameChanges > 1: text "s"
if info.lastUsernameChange.year > 0:
span(class="about-account-value"):
text "Last on " & info.lastUsernameChange.format("MMMM YYYY")
if info.source.len > 0:
tdiv(class="about-account-row"):
span: icon "link"
tdiv:
span(class="about-account-label"): text "Connected via"
span(class="about-account-value"): text info.source

View File

@@ -1,75 +0,0 @@
# SPDX-License-Identifier: AGPL-3.0-only
import strutils, strformat, times
import karax/[karaxdsl, vdom]
import renderutils
import ".."/[types, utils, formatters]
proc renderBroadcast*(bc: Broadcast; prefs: Prefs; path: string): VNode =
let
isLive = bc.state == "RUNNING"
thumb = getPicUrl(bc.thumb)
source = if prefs.proxyVideos and bc.m3u8Url.startsWith("http"):
getVidUrl(bc.m3u8Url) else: bc.m3u8Url
stateText =
if isLive: "LIVE"
elif bc.endTime.year > 1: "Ended " & bc.endTime.format("MMM d, YYYY")
elif bc.state.len > 0: bc.state
else: "Ended"
durationMs =
if bc.startTime.year > 1 and bc.endTime.year > 1:
int((bc.endTime - bc.startTime).inMilliseconds) - bc.replayStart * 1000
else: 0
duration = if durationMs > 0: getDuration(durationMs) else: ""
buildHtml(tdiv(class="broadcast-page")):
tdiv(class="broadcast-panel"):
tdiv(class="broadcast-player"):
if bc.m3u8Url.len > 0 and prefs.hlsPlayback:
video(poster=thumb, data-url=source, data-autoload="false",
data-start=($bc.replayStart), muted=prefs.muteVideos)
verbatim "<div class=\"video-overlay\" onclick=\"playVideo(this)\">"
tdiv(class="overlay-circle"): span(class="overlay-triangle")
if isLive:
tdiv(class="broadcast-live"): text "LIVE"
elif duration.len > 0:
tdiv(class="overlay-duration"): text duration
verbatim "</div>"
elif bc.m3u8Url.len > 0:
img(src=thumb, alt=bc.title)
tdiv(class="video-overlay"):
buttonReferer "/enablehls", "Enable hls playback", path
if isLive:
tdiv(class="broadcast-live"): text "LIVE"
elif duration.len > 0:
tdiv(class="overlay-duration"): text duration
elif bc.thumb.len > 0:
img(src=thumb, alt=bc.title)
tdiv(class="video-overlay"):
if bc.availableForReplay:
p: text "Stream unavailable"
else:
p: text "Replay is not available"
else:
tdiv(class="video-overlay"):
p: text "Broadcast not found"
tdiv(class="broadcast-info"):
h2(class="broadcast-title"): text bc.title
tdiv(class="broadcast-user-row"):
a(class="broadcast-user", href=("/" & bc.user.username)):
genImg(getUserPic(bc.user.userPic, "_bigger"))
tdiv:
tdiv:
strong: text bc.user.fullname
verifiedIcon(bc.user)
span(class="broadcast-username"): text "@" & bc.user.username
tdiv(class="broadcast-meta"):
if bc.totalWatched > 0:
span: text insertSep($bc.totalWatched, ',') & " views"
if isLive:
span(class="broadcast-live"): text stateText
else:
span: text stateText

View File

@@ -9,17 +9,13 @@ import general, tweet
const doctype = "<!DOCTYPE html>\n" const doctype = "<!DOCTYPE html>\n"
proc renderVideoEmbed*(tweet: Tweet; cfg: Config; req: Request): string = proc renderVideoEmbed*(tweet: Tweet; cfg: Config; req: Request): string =
let let thumb = get(tweet.video).thumb
video = tweet.getVideos()[0] let vidUrl = getVideoEmbed(cfg, tweet.id)
thumb = video.thumb let prefs = Prefs(hlsPlayback: true)
vidUrl = getVideoEmbed(cfg, tweet.id)
prefs = Prefs(hlsPlayback: true, mp4Playback: true)
let node = buildHtml(html(lang="en")): let node = buildHtml(html(lang="en")):
renderHead(prefs, cfg, req, video=vidUrl, images=(@[thumb])) renderHead(prefs, cfg, req, video=vidUrl, images=(@[thumb]))
body: tdiv(class="embed-video"):
tdiv(class="embed-video"): renderVideo(get(tweet.video), prefs, "")
renderVideo(video, prefs, "")
result = doctype & $node result = doctype & $node

View File

@@ -29,17 +29,19 @@ proc renderNavbar(cfg: Config; req: Request; rss, canonical: string): VNode =
tdiv(class="nav-item right"): tdiv(class="nav-item right"):
icon "search", title="Search", href="/search" icon "search", title="Search", href="/search"
if rss.len > 0: if cfg.enableRss and rss.len > 0:
icon "rss", title="RSS Feed", href=rss icon "rss-feed", title="RSS Feed", href=rss
icon "bird", title="Open in X", href=canonical icon "bird", title="Open in Twitter", href=canonical
a(href="https://liberapay.com/zedeus"): verbatim lp a(href="https://liberapay.com/zedeus"): verbatim lp
icon "info", title="About", href="/about" icon "info", title="About", href="/about"
icon "cog", title="Preferences", href=("/settings?referer=" & encodeUrl(path)) icon "cog", title="Preferences", href=("/settings?referer=" & encodeUrl(path))
proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc=""; proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
video=""; images: seq[string] = @[]; banner=""; ogTitle=""; video=""; images: seq[string] = @[]; banner=""; ogTitle="";
rss=""; alternate=""): VNode = rss=""; canonical=""): VNode =
let theme = prefs.theme.toTheme var theme = prefs.theme.toTheme
if "theme" in req.params:
theme = req.params["theme"].toTheme
let ogType = let ogType =
if video.len > 0: "video" if video.len > 0: "video"
@@ -50,8 +52,8 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
let opensearchUrl = getUrlPrefix(cfg) & "/opensearch" let opensearchUrl = getUrlPrefix(cfg) & "/opensearch"
buildHtml(head): buildHtml(head):
link(rel="stylesheet", type="text/css", href="/css/style.css?v=35") link(rel="stylesheet", type="text/css", href="/css/style.css?v=16")
link(rel="stylesheet", type="text/css", href="/css/fontello.css?v=5") link(rel="stylesheet", type="text/css", href="/css/fontello.css?v=2")
if theme.len > 0: if theme.len > 0:
link(rel="stylesheet", type="text/css", href=(&"/css/themes/{theme}.css")) link(rel="stylesheet", type="text/css", href=(&"/css/themes/{theme}.css"))
@@ -64,14 +66,14 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
link(rel="search", type="application/opensearchdescription+xml", title=cfg.title, link(rel="search", type="application/opensearchdescription+xml", title=cfg.title,
href=opensearchUrl) href=opensearchUrl)
if alternate.len > 0: if canonical.len > 0:
link(rel="alternate", href=alternate, title="View on X") link(rel="canonical", href=canonical)
if rss.len > 0: if cfg.enableRss and rss.len > 0:
link(rel="alternate", type="application/rss+xml", href=rss, title="RSS feed") link(rel="alternate", type="application/rss+xml", href=rss, title="RSS feed")
if prefs.hlsPlayback: if prefs.hlsPlayback:
script(src="/js/hls.min.js", `defer`="") script(src="/js/hls.light.min.js", `defer`="")
script(src="/js/hlsPlayback.js", `defer`="") script(src="/js/hlsPlayback.js", `defer`="")
if prefs.infiniteScroll: if prefs.infiniteScroll:
@@ -91,13 +93,14 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
meta(property="og:site_name", content="Nitter") meta(property="og:site_name", content="Nitter")
meta(property="og:locale", content="en_US") meta(property="og:locale", content="en_US")
if banner.len > 0 and not banner.startsWith('#'): if banner.len > 0:
let bannerUrl = getPicUrl(banner) let bannerUrl = getPicUrl(banner)
link(rel="preload", type="image/png", href=bannerUrl, `as`="image") link(rel="preload", type="image/png", href=bannerUrl, `as`="image")
for url in images: for url in images:
let preloadUrl = if "400x400" in url: getPicUrl(url) let suffix = if "400x400" in url or url.endsWith("placeholder.png"): ""
else: getSmallPic(url) else: "?name=small"
let preloadUrl = getPicUrl(url & suffix)
link(rel="preload", type="image/png", href=preloadUrl, `as`="image") link(rel="preload", type="image/png", href=preloadUrl, `as`="image")
let image = getUrlPrefix(cfg) & getPicUrl(url) let image = getUrlPrefix(cfg) & getPicUrl(url)
@@ -117,21 +120,20 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
# this is last so images are also preloaded # this is last so images are also preloaded
# if this is done earlier, Chrome only preloads one image for some reason # if this is done earlier, Chrome only preloads one image for some reason
link(rel="preload", type="font/woff2", `as`="font", link(rel="preload", type="font/woff2", `as`="font",
href="/fonts/fontello.woff2?61663884", crossorigin="anonymous") href="/fonts/fontello.woff2?21002321", crossorigin="anonymous")
proc renderMain*(body: VNode; req: Request; cfg: Config; prefs=defaultPrefs; proc renderMain*(body: VNode; req: Request; cfg: Config; prefs=defaultPrefs;
titleText=""; desc=""; ogTitle=""; rss=""; video=""; titleText=""; desc=""; ogTitle=""; rss=""; video="";
images: seq[string] = @[]; banner=""): string = images: seq[string] = @[]; banner=""): string =
let twitterLink = getTwitterLink(req.path, req.params) let canonical = getTwitterLink(req.path, req.params)
let node = buildHtml(html(lang="en")): let node = buildHtml(html(lang="en")):
renderHead(prefs, cfg, req, titleText, desc, video, images, banner, ogTitle, renderHead(prefs, cfg, req, titleText, desc, video, images, banner, ogTitle,
rss, twitterLink) rss, canonical)
let bodyClass = if prefs.stickyNav: "fixed-nav" else: "" body:
body(class=bodyClass): renderNavbar(cfg, req, rss, canonical)
renderNavbar(cfg, req, rss, twitterLink)
tdiv(class="container"): tdiv(class="container"):
body body

View File

@@ -32,8 +32,7 @@ macro renderPrefs*(): untyped =
result[2].add stmt result[2].add stmt
proc renderPreferences*(prefs: Prefs; path: string; themes: seq[string]; proc renderPreferences*(prefs: Prefs; path: string; themes: seq[string]): VNode =
prefsUrl: string): VNode =
buildHtml(tdiv(class="overlay-panel")): buildHtml(tdiv(class="overlay-panel")):
fieldset(class="preferences"): fieldset(class="preferences"):
form(`method`="post", action="/saveprefs", autocomplete="off"): form(`method`="post", action="/saveprefs", autocomplete="off"):
@@ -41,14 +40,6 @@ proc renderPreferences*(prefs: Prefs; path: string; themes: seq[string];
renderPrefs() renderPrefs()
legend: text "Bookmark"
p(class="bookmark-note"):
text "Save this URL to restore your preferences (?prefs works on all pages)"
pre(class="prefs-code"):
text prefsUrl
p(class="bookmark-note"):
verbatim "You can override preferences with query parameters (e.g. <code>?hlsPlayback=on</code>). These overrides aren't saved to cookies, and links won't retain the parameters. Intended for configuring RSS feeds and other cookieless environments. Hover over a preference to see its name."
h4(class="note"): h4(class="note"):
text "Preferences are stored client-side using cookies without any personal information." text "Preferences are stored client-side using cookies without any personal information."

Some files were not shown because too many files have changed in this diff Show More