mirror of https://github.com/zedeus/nitter.git synced 2025-12-06 03:55:36 -05:00

1 commit

Zed · c9b261a793 · WIP tweets/timeline parser · 2022-01-30 23:38:39 +01:00

66 changed files with 940 additions and 1155 deletions

View File

@@ -1,4 +1,4 @@
-name: Docker
+name: CI/CD
 on:
   push:
@@ -8,55 +8,31 @@ on:
       - master
 jobs:
-  tests:
-    uses: ./.github/workflows/run-tests.yml
-  build-docker-amd64:
-    needs: [tests]
-    runs-on: buildjet-2vcpu-ubuntu-2204
+  build-docker:
+    runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v1
+        with:
+          platforms: all
       - name: Set up Docker Buildx
        id: buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v1
        with:
          version: latest
       - name: Login to DockerHub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
-      - name: Build and push AMD64 Docker image
-        uses: docker/build-push-action@v3
+      - name: Build and push
+        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64
          push: true
          tags: zedeus/nitter:latest,zedeus/nitter:${{ github.sha }}
-  build-docker-arm64:
-    needs: [tests]
-    runs-on: buildjet-2vcpu-ubuntu-2204-arm
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-      - name: Set up Docker Buildx
-        id: buildx
-        uses: docker/setup-buildx-action@v2
-        with:
-          version: latest
-      - name: Login to DockerHub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
-      - name: Build and push ARM64 Docker image
-        uses: docker/build-push-action@v3
-        with:
-          context: .
-          file: ./Dockerfile.arm64
-          platforms: linux/arm64
-          push: true
-          tags: zedeus/nitter:latest-arm64,zedeus/nitter:${{ github.sha }}-arm64

View File

@@ -1,45 +0,0 @@
name: Tests
on:
push:
paths-ignore:
- "*.md"
branches-ignore:
- master
workflow_call:
jobs:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Cache nimble
id: cache-nimble
uses: actions/cache@v3
with:
path: ~/.nimble
key: nimble-${{ hashFiles('*.nimble') }}
restore-keys: "nimble-"
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- uses: jiro4989/setup-nim-action@v1
with:
nim-version: "1.x"
- run: nimble build -d:release -Y
- run: pip install seleniumbase
- run: seleniumbase install chromedriver
- uses: supercharge/redis-github-action@1.5.0
- name: Prepare Nitter
run: |
sudo apt install libsass-dev -y
cp nitter.example.conf nitter.conf
nimble md
nimble scss
- name: Run tests
run: |
./nitter &
pytest -n4 tests

.gitignore (vendored)
View File

@@ -3,11 +3,9 @@ nitter
 *.db
 /tests/__pycache__
 /tests/geckodriver.log
-/tests/downloaded_files
-/tests/latest_logs
+/tests/downloaded_files/*
 /tools/gencss
 /tools/rendermd
 /public/css/style.css
 /public/md/*.html
 nitter.conf
-dump.rdb

View File

@@ -1,5 +1,6 @@
-FROM nimlang/nim:1.6.10-alpine-regular as nim
+FROM nimlang/nim:1.6.2-alpine-regular as nim
 LABEL maintainer="setenforce@protonmail.com"
+EXPOSE 8080
 RUN apk --no-cache add libsass-dev pcre
@@ -15,11 +16,8 @@ RUN nimble build -d:danger -d:lto -d:strip \
 FROM alpine:latest
 WORKDIR /src/
-RUN apk --no-cache add pcre ca-certificates
+RUN apk --no-cache add pcre
 COPY --from=nim /src/nitter/nitter ./
 COPY --from=nim /src/nitter/nitter.example.conf ./nitter.conf
 COPY --from=nim /src/nitter/public ./public
-EXPOSE 8080
-RUN adduser -h /src/ -D -s /bin/sh nitter
-USER nitter
 CMD ./nitter

View File

@@ -1,23 +0,0 @@
FROM alpine:3.17 as nim
LABEL maintainer="setenforce@protonmail.com"
RUN apk --no-cache add gcc git libc-dev libsass-dev "nim=1.6.8-r0" nimble pcre
WORKDIR /src/nitter
COPY nitter.nimble .
RUN nimble install -y --depsOnly
COPY . .
RUN nimble build -d:danger -d:lto -d:strip \
&& nimble scss \
&& nimble md
FROM alpine:3.17
WORKDIR /src/
RUN apk --no-cache add ca-certificates pcre openssl1.1-compat
COPY --from=nim /src/nitter/nitter ./
COPY --from=nim /src/nitter/nitter.example.conf ./nitter.conf
COPY --from=nim /src/nitter/public ./public
EXPOSE 8080
CMD ./nitter

View File

@@ -1,7 +1,6 @@
 # Nitter
-[![Test Matrix](https://github.com/zedeus/nitter/workflows/Tests/badge.svg)](https://github.com/zedeus/nitter/actions/workflows/run-tests.yml)
-[![Test Matrix](https://github.com/zedeus/nitter/workflows/Docker/badge.svg)](https://github.com/zedeus/nitter/actions/workflows/build-docker.yml)
+[![Test Matrix](https://github.com/zedeus/nitter/workflows/CI/CD/badge.svg)](https://github.com/zedeus/nitter/actions?query=workflow%3ACI/CD)
 [![License](https://img.shields.io/github/license/zedeus/nitter?style=flat)](#license)
 A free and open source alternative Twitter front-end focused on privacy and
@@ -35,7 +34,7 @@ XMR: 42hKayRoEAw4D6G6t8mQHPJHQcXqofjFuVfavqKeNMNUZfeJLJAcNU19i1bGdDvcdN6romiSscW
 ## Resources
 The wiki contains
 [a list of instances](https://github.com/zedeus/nitter/wiki/Instances) and
 [browser extensions](https://github.com/zedeus/nitter/wiki/Extensions)
 maintained by the community.
@@ -68,10 +67,9 @@ Twitter account.
 ## Installation
 ### Dependencies
-- libpcre
-- libsass
-- redis
+* libpcre
+* libsass
+* redis
 To compile Nitter you need a Nim installation, see
 [nim-lang.org](https://nim-lang.org/install.html) for details. It is possible to
@@ -110,32 +108,25 @@ performance reasons.
 ### Docker
-Page for the Docker image: https://hub.docker.com/r/zedeus/nitter
-#### NOTE: For ARM64 support, please use the separate ARM64 docker image: [`zedeus/nitter:latest-arm64`](https://hub.docker.com/r/zedeus/nitter/tags).
+#### NOTE: For ARM64/ARM support, please use [unixfox's image](https://quay.io/repository/unixfox/nitter?tab=tags), more info [here](https://github.com/zedeus/nitter/issues/399#issuecomment-997263495)
 To run Nitter with Docker, you'll need to install and run Redis separately
 before you can run the container. See below for how to also run Redis using
 Docker.
 To build and run Nitter in Docker:
 ```bash
 docker build -t nitter:latest .
 docker run -v $(pwd)/nitter.conf:/src/nitter.conf -d --network host nitter:latest
 ```
-Note: For ARM64, use this Dockerfile: [`Dockerfile.arm64`](https://github.com/zedeus/nitter/blob/master/Dockerfile.arm64).
 A prebuilt Docker image is provided as well:
 ```bash
 docker run -v $(pwd)/nitter.conf:/src/nitter.conf -d --network host zedeus/nitter:latest
 ```
 Using docker-compose to run both Nitter and Redis as different containers:
 Change `redisHost` from `localhost` to `nitter-redis` in `nitter.conf`, then run:
 ```bash
 docker-compose up -d
 ```

View File

@@ -1,6 +1,5 @@
 --define:ssl
 --define:useStdLib
---threads:off
 # workaround httpbeast file upload bug
 --assertions:off

View File

@@ -8,21 +8,10 @@ services:
     ports:
       - "127.0.0.1:8080:8080" # Replace with "8080:8080" if you don't use a reverse proxy
     volumes:
-      - ./nitter.conf:/src/nitter.conf:Z,ro
+      - ./nitter.conf:/src/nitter.conf:ro
     depends_on:
       - nitter-redis
     restart: unless-stopped
-    healthcheck:
-      test: wget -nv --tries=1 --spider http://127.0.0.1:8080/Jack/status/20 || exit 1
-      interval: 30s
-      timeout: 5s
-      retries: 2
-    user: "998:998"
-    read_only: true
-    security_opt:
-      - no-new-privileges:true
-    cap_drop:
-      - ALL
   nitter-redis:
     image: redis:6-alpine
@@ -31,17 +20,6 @@ services:
     volumes:
       - nitter-redis:/data
     restart: unless-stopped
-    healthcheck:
-      test: redis-cli ping
-      interval: 30s
-      timeout: 5s
-      retries: 2
-    user: "999:1000"
-    read_only: true
-    security_opt:
-      - no-new-privileges:true
-    cap_drop:
-      - ALL
 volumes:
   nitter-redis:

View File

@@ -1,11 +1,11 @@
 [Server]
-hostname = "nitter.net" # for generating links, change this to your own domain/ip
-title = "nitter"
 address = "0.0.0.0"
 port = 8080
 https = false # disable to enable cookies when not using https
 httpMaxConnections = 100
 staticDir = "./public"
+title = "nitter"
+hostname = "nitter.net"
 [Cache]
 listMinutes = 240 # how long to cache list info (not the tweets, so keep it high)
@@ -13,9 +13,9 @@ rssMinutes = 10 # how long to cache rss queries
 redisHost = "localhost" # Change to "nitter-redis" if using docker-compose
 redisPort = 6379
 redisPassword = ""
-redisConnections = 20 # minimum open connections in pool
+redisConnections = 20 # connection pool size
 redisMaxConnections = 30
-# new connections are opened when none are available, but if the pool size
+# max, new connections are opened when none are available, but if the pool size
 # goes above this, they're closed when released. don't worry about this unless
 # you receive tons of requests per second
@@ -23,22 +23,23 @@ redisMaxConnections = 30
 hmacKey = "secretkey" # random key for cryptographic signing of video urls
 base64Media = false # use base64 encoding for proxied media urls
 enableRSS = true # set this to false to disable RSS feeds
-enableDebug = false # enable request logs and debug endpoints (/.tokens)
+enableDebug = false # enable request logs and debug endpoints
 proxy = "" # http/https url, SOCKS proxies are not supported
 proxyAuth = ""
 tokenCount = 10
 # minimum amount of usable tokens. tokens are used to authorize API requests,
-# but they expire after ~1 hour, and have a limit of 500 requests per endpoint.
-# the limits reset every 15 minutes, and the pool is filled up so there's
-# always at least `tokenCount` usable tokens. only increase this if you receive
-# major bursts all the time and don't have a rate limiting setup via e.g. nginx
+# but they expire after ~1 hour, and have a limit of 187 requests.
+# the limit gets reset every 15 minutes, and the pool is filled up so there's
+# always at least $tokenCount usable tokens. again, only increase this if
+# you receive major bursts all the time
 # Change default preferences here, see src/prefs_impl.nim for a complete list
 [Preferences]
 theme = "Nitter"
 replaceTwitter = "nitter.net"
-replaceYouTube = "piped.video"
+replaceYouTube = "piped.kavin.rocks"
 replaceReddit = "teddit.net"
+replaceInstagram = ""
 proxyVideos = true
 hlsPlayback = false
 infiniteScroll = false
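A rough back-of-the-envelope reading of the token comments in this hunk (a sketch only; it assumes every guest token really delivers the full 187-request budget each 15-minute window, which the comment itself does not guarantee):

```nim
# Throughput implied by tokenCount = 10 and 187 requests per token per
# 15-minute window (assumed best case).
let tokenCount = 10                       # default from nitter.example.conf
let requestsPerWindow = tokenCount * 187  # 1870 requests per 15 minutes
echo requestsPerWindow, " requests / 15 min, ~", requestsPerWindow / (15 * 60), " req/s"
```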

View File

@@ -11,18 +11,18 @@ bin = @["nitter"]
 # Dependencies
 requires "nim >= 1.4.8"
-requires "jester#baca3f"
-requires "karax#5cf360c"
-requires "sass#7dfdd03"
-requires "nimcrypto#4014ef9"
-requires "markdown#158efe3"
-requires "packedjson#9e6fbb6"
-requires "supersnappy#6c94198"
+requires "jester >= 0.5.0"
+requires "karax#c71bc92"
+requires "sass#e683aa1"
+requires "nimcrypto#a5742a9"
+requires "markdown#abdbe5e"
+requires "packedjson#d11d167"
+requires "supersnappy#2.1.1"
 requires "redpool#8b7c1db"
 requires "https://github.com/zedeus/redis#d0a0e6f"
-requires "zippy#ca5989a"
-requires "flatty#e668085"
-requires "jsony#ea811be"
+requires "zippy#0.7.3"
+requires "flatty#0.2.3"
+requires "jsony#d0e69bd"
 # Tasks

View File

@@ -1,41 +0,0 @@
body {
--bg_color: #282a36;
--fg_color: #f8f8f2;
--fg_faded: #818eb6;
--fg_dark: var(--fg_faded);
--fg_nav: var(--accent);
--bg_panel: #343746;
--bg_elements: #292b36;
--bg_overlays: #44475a;
--bg_hover: #2f323f;
--grey: var(--fg_faded);
--dark_grey: #44475a;
--darker_grey: #3d4051;
--darkest_grey: #363948;
--border_grey: #44475a;
--accent: #bd93f9;
--accent_light: #caa9fa;
--accent_dark: var(--accent);
--accent_border: #ff79c696;
--play_button: #ffb86c;
--play_button_hover: #ffc689;
--more_replies_dots: #bd93f9;
--error_red: #ff5555;
--verified_blue: var(--accent);
--icon_text: ##F8F8F2;
--tab: #6272a4;
--tab_selected: var(--accent);
--profile_stat: #919cbf;
}
.search-bar > form input::placeholder{
color: var(--fg_faded);
}

File diff suppressed because one or more lines are too long

View File

@@ -1,5 +0,0 @@
User-agent: *
Disallow: /
Crawl-delay: 1
User-agent: Twitterbot
Disallow:

View File

@@ -4,143 +4,122 @@ import packedjson
 import types, query, formatters, consts, apiutils, parser
 import experimental/parser as newParser
-proc getGraphUser*(username: string): Future[User] {.async.} =
-  if username.len == 0: return
-  let
-    variables = %*{"screen_name": username}
-    params = {"variables": $variables, "features": gqlFeatures}
-    js = await fetchRaw(graphUser ? params, Api.userScreenName)
-  result = parseGraphUser(js)
-proc getGraphUserById*(id: string): Future[User] {.async.} =
+proc getGraphUser*(id: string): Future[User] {.async.} =
   if id.len == 0 or id.any(c => not c.isDigit): return
   let
-    variables = %*{"userId": id}
-    params = {"variables": $variables, "features": gqlFeatures}
-    js = await fetchRaw(graphUserById ? params, Api.userRestId)
+    variables = %*{"userId": id, "withSuperFollowsUserFields": true}
+    js = await fetchRaw(graphUser ? {"variables": $variables}, Api.userRestId)
   result = parseGraphUser(js)
-proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Timeline] {.async.} =
-  if id.len == 0: return
-  let
-    cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
-    variables = userTweetsVariables % [id, cursor]
-    params = {"variables": variables, "features": gqlFeatures}
-    (url, apiId) = case kind
-                   of TimelineKind.tweets: (graphUserTweets, Api.userTweets)
-                   of TimelineKind.replies: (graphUserTweetsAndReplies, Api.userTweetsAndReplies)
-                   of TimelineKind.media: (graphUserMedia, Api.userMedia)
-    js = await fetch(url ? params, apiId)
-  result = parseGraphTimeline(js, "user", after)
-proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} =
-  if id.len == 0: return
-  let
-    cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
-    variables = listTweetsVariables % [id, cursor]
-    params = {"variables": variables, "features": gqlFeatures}
-    js = await fetch(graphListTweets ? params, Api.listTweets)
-  result = parseGraphTimeline(js, "list", after)
 proc getGraphListBySlug*(name, list: string): Future[List] {.async.} =
   let
-    variables = %*{"screenName": name, "listSlug": list}
-    params = {"variables": $variables, "features": gqlFeatures}
-  result = parseGraphList(await fetch(graphListBySlug ? params, Api.listBySlug))
+    variables = %*{"screenName": name, "listSlug": list, "withHighlightedLabel": false}
+    url = graphListBySlug ? {"variables": $variables}
+  result = parseGraphList(await fetch(url, Api.listBySlug))
 proc getGraphList*(id: string): Future[List] {.async.} =
   let
-    variables = %*{"listId": id}
-    params = {"variables": $variables, "features": gqlFeatures}
-  result = parseGraphList(await fetch(graphListById ? params, Api.list))
+    variables = %*{"listId": id, "withHighlightedLabel": false}
+    url = graphList ? {"variables": $variables}
+  result = parseGraphList(await fetch(url, Api.list))
 proc getGraphListMembers*(list: List; after=""): Future[Result[User]] {.async.} =
   if list.id.len == 0: return
-  var
+  let
     variables = %*{
       "listId": list.id,
+      "cursor": after,
+      "withSuperFollowsUserFields": false,
       "withBirdwatchPivots": false,
       "withDownvotePerspective": false,
       "withReactionsMetadata": false,
-      "withReactionsPerspective": false
+      "withReactionsPerspective": false,
+      "withSuperFollowsTweetFields": false
     }
-  if after.len > 0:
-    variables["cursor"] = % after
-  let url = graphListMembers ? {"variables": $variables, "features": gqlFeatures}
+    url = graphListMembers ? {"variables": $variables}
   result = parseGraphListMembers(await fetchRaw(url, Api.listMembers), after)
-proc getGraphTweetResult*(id: string): Future[Tweet] {.async.} =
+proc getListTimeline*(id: string; after=""): Future[Timeline] {.async.} =
   if id.len == 0: return
   let
-    variables = tweetResultVariables % id
-    params = {"variables": variables, "features": gqlFeatures}
-    js = await fetch(graphTweetResult ? params, Api.tweetResult)
-  result = parseGraphTweetResult(js)
+    ps = genParams({"list_id": id, "ranking_mode": "reverse_chronological"}, after)
+    url = listTimeline ? ps
+  result = parseTimeline(await fetch(url, Api.timeline), after)
-proc getGraphTweet(id: string; after=""): Future[Conversation] {.async.} =
+proc getUser*(username: string): Future[User] {.async.} =
+  if username.len == 0: return
+  let
+    ps = genParams({"screen_name": username})
+    json = await fetchRaw(userShow ? ps, Api.userShow)
+  result = parseUser(json, username)
+proc getUserById*(userId: string): Future[User] {.async.} =
+  if userId.len == 0: return
+  let
+    ps = genParams({"user_id": userId})
+    json = await fetchRaw(userShow ? ps, Api.userShow)
+  result = parseUser(json)
+proc getTimeline*(id: string; after=""; replies=false): Future[Timeline] {.async.} =
   if id.len == 0: return
   let
-    cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
-    variables = tweetVariables % [id, cursor]
-    params = {"variables": variables, "features": gqlFeatures}
-    js = await fetch(graphTweet ? params, Api.tweetDetail)
-  result = parseGraphConversation(js, id)
+    ps = genParams({"userId": id, "include_tweet_replies": $replies}, after)
+    url = timeline / (id & ".json") ? ps
+  result = parseTimeline(await fetch(url, Api.timeline), after)
-proc getReplies*(id, after: string): Future[Result[Chain]] {.async.} =
-  result = (await getGraphTweet(id, after)).replies
-  result.beginning = after.len == 0
+proc getMediaTimeline*(id: string; after=""): Future[Timeline] {.async.} =
+  if id.len == 0: return
+  let url = mediaTimeline / (id & ".json") ? genParams(cursor=after)
+  result = parseTimeline(await fetch(url, Api.timeline), after)
-proc getTweet*(id: string; after=""): Future[Conversation] {.async.} =
-  result = await getGraphTweet(id)
-  if after.len > 0:
-    result.replies = await getReplies(id, after)
-proc getGraphSearch*(query: Query; after=""): Future[Result[Tweet]] {.async.} =
-  let q = genQueryParam(query)
-  if q.len == 0 or q == emptyQuery:
-    return Result[Tweet](query: query, beginning: true)
-  var
-    variables = %*{
-      "rawQuery": q,
-      "count": 20,
-      "product": "Latest",
-      "withDownvotePerspective": false,
-      "withReactionsMetadata": false,
-      "withReactionsPerspective": false
-    }
-  if after.len > 0:
-    variables["cursor"] = % after
-  let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
-  result = parseGraphSearch(await fetch(url, Api.search), after)
-  result.query = query
-proc getUserSearch*(query: Query; page="1"): Future[Result[User]] {.async.} =
-  if query.text.len == 0:
-    return Result[User](query: query, beginning: true)
-  var url = userSearch ? {
-    "q": query.text,
-    "skip_status": "1",
-    "count": "20",
-    "page": page
-  }
-  result = parseUsers(await fetchRaw(url, Api.userSearch))
-  result.query = query
-  if page.len == 0:
-    result.bottom = "2"
-  elif page.allCharsInSet(Digits):
-    result.bottom = $(parseInt(page) + 1)
 proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
   if name.len == 0: return
   let
     ps = genParams({"screen_name": name, "trim_user": "true"},
                    count="18", ext=false)
     url = photoRail ? ps
   result = parsePhotoRail(await fetch(url, Api.timeline))
+proc getSearch*[T](query: Query; after=""): Future[Result[T]] {.async.} =
+  when T is User:
+    const
+      searchMode = ("result_filter", "user")
+      parse = parseUsers
+      fetchFunc = fetchRaw
+  else:
+    const
+      searchMode = ("tweet_search_mode", "live")
+      parse = parseTweets
+      fetchFunc = fetchRaw
+  let q = genQueryParam(query)
+  if q.len == 0 or q == emptyQuery:
+    return Result[T](beginning: true, query: query)
+  let url = search ? genParams(searchParams & @[("q", q), searchMode], after)
+  try:
+    result = parse(await fetchFunc(url, Api.search), after)
+    result.query = query
+  except InternalError:
+    return Result[T](beginning: true, query: query)
+proc getTweetImpl(id: string; after=""): Future[Conversation] {.async.} =
+  let url = tweet / (id & ".json") ? genParams(cursor=after)
+  result = parseConversation(await fetch(url, Api.tweet), id)
+proc getReplies*(id, after: string): Future[Result[Chain]] {.async.} =
+  result = (await getTweetImpl(id, after)).replies
+  result.beginning = after.len == 0
+proc getTweet*(id: string; after=""): Future[Conversation] {.async.} =
+  result = await getTweetImpl(id)
+  if after.len > 0:
+    result.replies = await getReplies(id, after)
+proc getStatus*(id: string): Future[Tweet] {.async.} =
+  let url = status / (id & ".json") ? genParams()
+  result = parseStatus(await fetch(url, Api.status))
 proc resolve*(url: string; prefs: Prefs): Future[string] {.async.} =
   let client = newAsyncHttpClient(maxRedirects=0)
   try:

View File

@@ -17,13 +17,13 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
     result &= p
   if ext:
     result &= ("ext", "mediaStats")
-    result &= ("include_ext_alt_text", "1")
-    result &= ("include_ext_media_availability", "1")
+    result &= ("include_ext_alt_text", "true")
+    result &= ("include_ext_media_availability", "true")
   if count.len > 0:
     result &= ("count", count)
   if cursor.len > 0:
     # The raw cursor often has plus signs, which sometimes get turned into spaces,
-    # so we need to turn them back into a plus
+    # so we need to them back into a plus
     if " " in cursor:
       result &= ("cursor", cursor.replace(" ", "+"))
     else:
@@ -44,7 +44,7 @@ proc genHeaders*(token: Token = nil): HttpHeaders =
   })
 template updateToken() =
-  if resp.headers.hasKey(rlRemaining):
+  if api != Api.search and resp.headers.hasKey(rlRemaining):
     let
       remaining = parseInt(resp.headers[rlRemaining])
       reset = parseInt(resp.headers[rlReset])
@@ -61,15 +61,12 @@ template fetchImpl(result, fetchBody) {.dirty.} =
   try:
     var resp: AsyncResponse
     pool.use(genHeaders(token)):
-      template getContent =
-        resp = await c.get($url)
-        result = await resp.body
-      getContent()
+      resp = await c.get($url)
+      result = await resp.body
     if resp.status == $Http503:
       badClient = true
-      raise newException(BadClientError, "Bad client")
+      raise newException(InternalError, result)
     if result.len > 0:
       if resp.headers.getOrDefault("content-encoding") == "gzip":
@@ -85,9 +82,6 @@ template fetchImpl(result, fetchBody) {.dirty.} =
       raise newException(InternalError, $url)
   except InternalError as e:
     raise e
-  except BadClientError as e:
-    release(token, used=true)
-    raise e
   except Exception as e:
     echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
     if "length" notin e.msg and "descriptor" notin e.msg:
@@ -106,7 +100,7 @@ proc fetch*(url: Uri; api: Api): Future[JsonNode] {.async.} =
     updateToken()
     let error = result.getError
-    if error in {invalidToken, badToken}:
+    if error in {invalidToken, forbidden, badToken}:
       echo "fetch error: ", result.getError
       release(token, invalid=true)
       raise rateLimitError()
@@ -121,7 +115,7 @@ proc fetchRaw*(url: Uri; api: Api): Future[string] {.async.} =
   if result.startsWith("{\"errors"):
     let errors = result.fromJson(Errors)
-    if errors in {invalidToken, badToken}:
+    if errors in {invalidToken, forbidden, badToken}:
       echo "fetch error: ", errors
       release(token, invalid=true)
       raise rateLimitError()
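A tiny illustration of the cursor quirk the genParams comment above describes (the cursor value here is made up):

```nim
# '+' characters in a raw pagination cursor arrive URL-decoded as spaces, so
# genParams puts them back before the cursor is reused in the next request.
import std/strutils

let received = "HBaAwLDZuf rsvhaAAA="   # hypothetical cursor after '+' -> ' '
echo received.replace(" ", "+")          # HBaAwLDZuf+rsvhaAAA=
```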

View File

@@ -1,28 +1,28 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import uri, sequtils, strutils
+import uri, sequtils
 const
-  auth* = "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA"
+  auth* = "Bearer AAAAAAAAAAAAAAAAAAAAAPYXBAAAAAAACLXUNDekMxqa8h%2F40K4moUkGsoc%3DTYfbDKbT3jJPCEVnMYqilB28NHfOPqkca3qaAxGfsyKCs0wRbw"
   api = parseUri("https://api.twitter.com")
   activate* = $(api / "1.1/guest/activate.json")
+  userShow* = api / "1.1/users/show.json"
   photoRail* = api / "1.1/statuses/media_timeline.json"
-  userSearch* = api / "1.1/users/search.json"
+  status* = api / "1.1/statuses/show"
+  search* = api / "2/search/adaptive.json"
+  timelineApi = api / "2/timeline"
+  timeline* = timelineApi / "profile"
+  mediaTimeline* = timelineApi / "media"
+  listTimeline* = timelineApi / "list.json"
+  tweet* = timelineApi / "conversation"
   graphql = api / "graphql"
-  graphUser* = graphql / "pVrmNaXcxPjisIvKtLDMEA/UserByScreenName"
-  graphUserById* = graphql / "1YAM811Q8Ry4XyPpJclURQ/UserByRestId"
-  graphUserTweets* = graphql / "WzJjibAcDa-oCjCcLOotcg/UserTweets"
-  graphUserTweetsAndReplies* = graphql / "fn9oRltM1N4thkh5CVusPg/UserTweetsAndReplies"
-  graphUserMedia* = graphql / "qQoeS7szGavsi8-ehD2AWg/UserMedia"
-  graphTweet* = graphql / "miKSMGb2R1SewIJv2-ablQ/TweetDetail"
-  graphTweetResult* = graphql / "0kc0a_7TTr3dvweZlMslsQ/TweetResultByRestId"
-  graphSearchTimeline* = graphql / "gkjsKepM6gl_HmFWoWKfgg/SearchTimeline"
-  graphListById* = graphql / "iTpgCtbdxrsJfyx0cFjHqg/ListByRestId"
-  graphListBySlug* = graphql / "-kmqNvm5Y-cVrfvBy6docg/ListBySlug"
-  graphListMembers* = graphql / "P4NpVZDqUD_7MEM84L-8nw/ListMembers"
-  graphListTweets* = graphql / "jZntL0oVJSdjhmPcdbw_eA/ListLatestTweetsTimeline"
+  graphUser* = graphql / "I5nvpI91ljifos1Y3Lltyg/UserByRestId"
+  graphList* = graphql / "JADTh6cjebfgetzvF3tQvQ/List"
+  graphListBySlug* = graphql / "ErWsz9cObLel1BF-HjuBlA/ListBySlug"
+  graphListMembers* = graphql / "Ke6urWMeCV2UlKXGRy4sow/ListMembers"
   timelineParams* = {
     "include_profile_interstitial_type": "0",
@@ -33,89 +33,27 @@ const
     "include_mute_edge": "0",
     "include_can_dm": "0",
     "include_can_media_tag": "1",
-    "include_ext_is_blue_verified": "1",
     "skip_status": "1",
     "cards_platform": "Web-12",
     "include_cards": "1",
-    "include_composer_source": "0",
+    "include_composer_source": "false",
     "include_reply_count": "1",
     "tweet_mode": "extended",
-    "include_entities": "1",
-    "include_user_entities": "1",
-    "include_ext_media_color": "0",
-    "send_error_codes": "1",
-    "simple_quoted_tweet": "1",
-    "include_quote_count": "1"
+    "include_entities": "true",
+    "include_user_entities": "true",
+    "include_ext_media_color": "false",
+    "send_error_codes": "true",
+    "simple_quoted_tweet": "true",
+    "include_quote_count": "true"
   }.toSeq
-  gqlFeatures* = """{
-  "blue_business_profile_image_shape_enabled": false,
-  "creator_subscriptions_tweet_preview_api_enabled": true,
-  "freedom_of_speech_not_reach_fetch_enabled": false,
-  "graphql_is_translatable_rweb_tweet_is_translatable_enabled": false,
-  "highlights_tweets_tab_ui_enabled": false,
-  "interactive_text_enabled": false,
-  "longform_notetweets_consumption_enabled": true,
-  "longform_notetweets_inline_media_enabled": false,
-  "longform_notetweets_richtext_consumption_enabled": true,
-  "longform_notetweets_rich_text_read_enabled": false,
-  "responsive_web_edit_tweet_api_enabled": false,
-  "responsive_web_enhance_cards_enabled": false,
-  "responsive_web_graphql_exclude_directive_enabled": true,
-  "responsive_web_graphql_skip_user_profile_image_extensions_enabled": false,
-  "responsive_web_graphql_timeline_navigation_enabled": false,
-  "responsive_web_text_conversations_enabled": false,
-  "responsive_web_twitter_blue_verified_badge_is_enabled": true,
-  "rweb_lists_timeline_redesign_enabled": true,
-  "spaces_2022_h2_clipping": true,
-  "spaces_2022_h2_spaces_communities": true,
-  "standardized_nudges_misinfo": false,
-  "tweet_awards_web_tipping_enabled": false,
-  "tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled": false,
-  "tweetypie_unmention_optimization_enabled": false,
-  "verified_phone_label_enabled": false,
-  "vibe_api_enabled": false,
-  "view_counts_everywhere_api_enabled": false
-}""".replace(" ", "").replace("\n", "")
-  tweetVariables* = """{
-    "focalTweetId": "$1",
-    $2
-    "withBirdwatchNotes": false,
-    "includePromotedContent": false,
-    "withDownvotePerspective": false,
-    "withReactionsMetadata": false,
-    "withReactionsPerspective": false,
-    "withVoice": false
-  }"""
-  tweetResultVariables* = """{
-    "tweetId": "$1",
-    "includePromotedContent": false,
-    "withDownvotePerspective": false,
-    "withReactionsMetadata": false,
-    "withReactionsPerspective": false,
-    "withVoice": false,
-    "withCommunity": false
-  }"""
-  userTweetsVariables* = """{
-    "userId": "$1", $2
-    "count": 20,
-    "includePromotedContent": false,
-    "withDownvotePerspective": false,
-    "withReactionsMetadata": false,
-    "withReactionsPerspective": false,
-    "withVoice": false,
-    "withV2Timeline": true
-  }"""
-  listTweetsVariables* = """{
-    "listId": "$1", $2
-    "count": 20,
-    "includePromotedContent": false,
-    "withDownvotePerspective": false,
-    "withReactionsMetadata": false,
-    "withReactionsPerspective": false,
-    "withVoice": false
-  }"""
+  searchParams* = {
+    "query_source": "typed_query",
+    "pc": "1",
+    "spelling_corrections": "1"
+  }.toSeq
+  ## top: nothing
+  ## latest: "tweet_search_mode: live"
+  ## user: "result_filter: user"
+  ## photos: "result_filter: photos"
+  ## videos: "result_filter: videos"
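A hedged sketch of how the old adaptive-search endpoint and the mode comments above fit together; the query text is invented, and std/uri's `/` and `?` operators are the same ones this file already relies on:

```nim
import std/uri

let
  api = parseUri("https://api.twitter.com")
  search = api / "2/search/adaptive.json"

# "latest" tab per the comments above: tweet_search_mode = live
echo search ? {"q": "#nimlang", "tweet_search_mode": "live"}
```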

View File

@@ -1,2 +1,2 @@
-import parser/[user, graphql]
-export user, graphql
+import parser/[user, graphql, timeline]
+export user, graphql, timeline

View File

@@ -1,17 +1,11 @@
-import options
 import jsony
 import user, ../types/[graphuser, graphlistmembers]
 from ../../types import User, Result, Query, QueryKind
 proc parseGraphUser*(json: string): User =
   let raw = json.fromJson(GraphUser)
-  if raw.data.user.result.reason.get("") == "Suspended":
-    return User(suspended: true)
   result = toUser raw.data.user.result.legacy
   result.id = raw.data.user.result.restId
-  result.verified = result.verified or raw.data.user.result.isBlueVerified
 proc parseGraphListMembers*(json, cursor: string): Result[User] =
   result = Result[User](

View File

@@ -0,0 +1,44 @@
import std/[json, strutils, times, math]
import utils
import ".."/types/[media, tweet]
from ../../types import Poll, Gif, Video, VideoVariant, VideoType
proc parseVideo*(entity: Entity): Video =
result = Video(
thumb: entity.mediaUrlHttps.getImageUrl,
views: entity.ext.mediaStats{"r", "ok", "viewCount"}.getStr,
available: entity.extMediaAvailability.status == "available",
title: entity.extAltText,
durationMs: entity.videoInfo.durationMillis,
description: entity.additionalMediaInfo.description,
variants: entity.videoInfo.variants
# playbackType: mp4
)
if entity.additionalMediaInfo.title.len > 0:
result.title = entity.additionalMediaInfo.title
proc parseGif*(entity: Entity): Gif =
result = Gif(
url: entity.videoInfo.variants[0].url.getImageUrl,
thumb: entity.getImageUrl
)
proc parsePoll*(card: Card): Poll =
let vals = card.bindingValues
# name format is pollNchoice_*
for i in '1' .. card.name[4]:
let choice = "choice" & i
result.values.add parseInt(vals{choice & "_count", "string_value"}.getStr("0"))
result.options.add vals{choice & "_label", "string_value"}.getStr
let time = vals{"end_datetime_utc", "string_value"}.getStr.parseIsoDate
if time > now():
let timeLeft = $(time - now())
result.status = timeLeft[0 ..< timeLeft.find(",")]
else:
result.status = "Final results"
result.leader = result.values.find(max(result.values))
result.votes = result.values.sum
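A worked example of the poll-card naming convention parsePoll relies on above (the card name is hypothetical):

```nim
# "pollNchoice_*" encodes the number of options at index 4 of the card name.
let name = "poll3choice_text_only"
assert name[4] == '3'
for i in '1' .. name[4]:
  echo "choice", i, "_label / choice", i, "_count"  # keys looked up in bindingValues
```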

View File

@@ -1,15 +1,14 @@
 import std/[macros, htmlgen, unicode]
-import ../types/common
 import ".."/../[formatters, utils]
 type
-  ReplaceSliceKind = enum
+  ReplaceSliceKind* = enum
     rkRemove, rkUrl, rkHashtag, rkMention
   ReplaceSlice* = object
-    slice: Slice[int]
-    kind: ReplaceSliceKind
-    url, display: string
+    slice*: Slice[int]
+    kind*: ReplaceSliceKind
+    url*, display*: string
 proc cmp*(x, y: ReplaceSlice): int = cmp(x.slice.a, y.slice.b)
@@ -27,11 +26,14 @@ proc dedupSlices*(s: var seq[ReplaceSlice]) =
       inc j
     inc i
-proc extractUrls*(result: var seq[ReplaceSlice]; url: Url;
-                  textLen: int; hideTwitter = false) =
+proc extractHashtags*(result: var seq[ReplaceSlice]; slice: Slice[int]) =
+  result.add ReplaceSlice(kind: rkHashtag, slice: slice)
+proc extractUrls*[T](result: var seq[ReplaceSlice]; entity: T;
+                     textLen: int; hideTwitter = false) =
   let
-    link = url.expandedUrl
-    slice = url.indices[0] ..< url.indices[1]
+    link = entity.expandedUrl
+    slice = entity.indices
   if hideTwitter and slice.b.succ >= textLen and link.isTwitterUrl:
     if slice.a < textLen:

View File

@@ -0,0 +1,84 @@
import std/[strutils, tables, options]
import jsony
import user, tweet, utils, ../types/timeline
from ../../types import Result, User, Tweet
proc parseHook(s: string; i: var int; v: var Slice[int]) =
var slice: array[2, int]
parseHook(s, i, slice)
v = slice[0] ..< slice[1]
proc getId(id: string): string {.inline.} =
let start = id.rfind("-")
if start < 0: return id
id[start + 1 ..< id.len]
proc processTweet(id: string; objects: GlobalObjects;
userCache: var Table[string, User]): Tweet =
let raw = objects.tweets[id]
result = toTweet raw
let uid = result.user.id
if uid.len > 0 and uid in objects.users:
if uid notin userCache:
userCache[uid] = toUser objects.users[uid]
result.user = userCache[uid]
let rtId = raw.retweetedStatusIdStr
if rtId.len > 0:
if rtId in objects.tweets:
result.retweet = some processTweet(rtId, objects, userCache)
else:
result.retweet = some Tweet(id: rtId.toId)
let qId = raw.quotedStatusIdStr
if qId.len > 0:
if qId in objects.tweets:
result.quote = some processTweet(qId, objects, userCache)
else:
result.quote = some Tweet(id: qId.toId)
proc parseCursor[T](e: Entry; result: var Result[T]) =
let cursor = e.content.operation.cursor
if cursor.cursorType == "Top":
result.top = cursor.value
elif cursor.cursorType == "Bottom":
result.bottom = cursor.value
proc parseUsers*(json: string; after=""): Result[User] =
result = Result[User](beginning: after.len == 0)
let raw = json.fromJson(Search)
if raw.timeline.instructions.len == 0:
return
for e in raw.timeline.instructions[0].addEntries.entries:
let
eId = e.entryId
id = eId.getId
if eId.startsWith("user") or eId.startsWith("sq-U"):
if id in raw.globalObjects.users:
result.content.add toUser raw.globalObjects.users[id]
elif eId.startsWith("cursor") or eId.startsWith("sq-C"):
parseCursor(e, result)
proc parseTweets*(json: string; after=""): Result[Tweet] =
result = Result[Tweet](beginning: after.len == 0)
let raw = json.fromJson(Search)
if raw.timeline.instructions.len == 0:
return
var userCache: Table[string, User]
for e in raw.timeline.instructions[0].addEntries.entries:
let
eId = e.entryId
id = eId.getId
if eId.startsWith("tweet") or eId.startsWith("sq-I-t"):
if id in raw.globalObjects.tweets:
result.content.add processTweet(id, raw.globalObjects, userCache)
elif eId.startsWith("cursor") or eId.startsWith("sq-C"):
parseCursor(e, result)

View File

@@ -0,0 +1,97 @@
import std/[strutils, options, algorithm, json]
import std/unicode except strip
import utils, slices, media, user
import ../types/tweet
from ../types/media as mediaTypes import MediaType
from ../../types import Tweet, User, TweetStats
proc expandTweetEntities(tweet: var Tweet; raw: RawTweet) =
let
orig = raw.fullText.toRunes
textRange = raw.displayTextRange
textSlice = textRange[0] .. textRange[1]
hasCard = raw.card.isSome
var replyTo = ""
if tweet.replyId > 0:
tweet.reply.add raw.inReplyToScreenName
replyTo = raw.inReplyToScreenName
var replacements = newSeq[ReplaceSlice]()
for u in raw.entities.urls:
if u.url.len == 0 or u.url notin raw.fullText:
continue
replacements.extractUrls(u, textSlice.b, hideTwitter=raw.isQuoteStatus)
# if hasCard and u.url == get(tweet.card).url:
# get(tweet.card).url = u.expandedUrl
for m in raw.entities.media:
replacements.extractUrls(m, textSlice.b, hideTwitter=true)
for hashtag in raw.entities.hashtags:
replacements.extractHashtags(hashtag.indices)
for symbol in raw.entities.symbols:
replacements.extractHashtags(symbol.indices)
for mention in raw.entities.userMentions:
let
name = mention.screenName
idx = tweet.reply.find(name)
if mention.indices.a >= textSlice.a:
replacements.add ReplaceSlice(kind: rkMention, slice: mention.indices,
url: "/" & name, display: mention.name)
if idx > -1 and name != replyTo:
tweet.reply.delete idx
elif idx == -1 and tweet.replyId != 0:
tweet.reply.add name
replacements.dedupSlices
replacements.sort(cmp)
tweet.text = orig.replacedWith(replacements, textSlice)
.strip(leading=false)
proc toTweet*(raw: RawTweet): Tweet =
result = Tweet(
id: raw.idStr.toId,
threadId: raw.conversationIdStr.toId,
replyId: raw.inReplyToStatusIdStr.toId,
time: parseTwitterDate(raw.createdAt),
hasThread: raw.selfThread.idStr.len > 0,
available: true,
user: User(id: raw.userIdStr),
stats: TweetStats(
replies: raw.replyCount,
retweets: raw.retweetCount,
likes: raw.favoriteCount,
quotes: raw.quoteCount
)
)
result.expandTweetEntities(raw)
if raw.card.isSome:
let card = raw.card.get
if "poll" in card.name:
result.poll = some parsePoll(card)
if "image" in card.name:
result.photos.add card.bindingValues{"image_large", "image_value", "url"}
.getStr.getImageUrl
# elif card.name == "amplify":
# discard
# # result.video = some(parsePromoVideo(jsCard{"binding_values"}))
# else:
# result.card = some parseCard(card, raw.entities.urls)
for m in raw.extendedEntities.media:
case m.kind
of photo: result.photos.add m.getImageUrl
of video:
result.video = some parseVideo(m)
if m.additionalMediaInfo.sourceUser.isSome:
result.attribution = some toUser get(m.additionalMediaInfo.sourceUser)
of animatedGif: result.gif = some parseGif(m)

View File

@@ -1,11 +1,8 @@
-import std/[options, tables, strutils, strformat, sugar]
+import std/[options, tables, strformat]
 import jsony
-import ../types/unifiedcard
+import utils
+import ".."/types/[unifiedcard, media]
 from ../../types import Card, CardKind, Video
-from ../../utils import twimg, https
-proc getImageUrl(entity: MediaEntity): string =
-  entity.mediaUrlHttps.dup(removePrefix(twimg), removePrefix(https))
 proc parseDestination(id: string; card: UnifiedCard; result: var Card) =
   let destination = card.destinationObjects[id].data
@@ -66,8 +63,7 @@ proc parseMedia(component: Component; card: UnifiedCard; result: var Card) =
       durationMs: videoInfo.durationMillis,
       variants: videoInfo.variants
     )
-  of model3d:
-    result.title = "Unsupported 3D model ad"
+  of animatedGif: discard
 proc parseUnifiedCard*(json: string): Card =
   let card = json.fromJson(UnifiedCard)
@@ -80,14 +76,10 @@ proc parseUnifiedCard*(json: string): Card =
     component.data.parseAppDetails(card, result)
   of mediaWithDetailsHorizontal:
     component.data.parseMediaDetails(card, result)
-  of media, swipeableMedia:
+  of ComponentType.media, swipeableMedia:
     component.parseMedia(card, result)
   of buttonGroup:
     discard
-  of ComponentType.hidden:
-    result.kind = CardKind.hidden
-  of ComponentType.unknown:
-    echo "ERROR: Unknown component type: ", json
   case component.kind
   of twitterListDetails:

View File

@@ -1,14 +1,14 @@
-import std/[algorithm, unicode, re, strutils, strformat, options, nre]
+import std/[algorithm, unicode, re, strutils, strformat, options]
 import jsony
 import utils, slices
 import ../types/user as userType
-from ../../types import Result, User, Error
+from ../../types import User, Error
 let
-  unRegex = re.re"(^|[^A-z0-9-_./?])@([A-z0-9_]{1,15})"
+  unRegex = re"(^|[^A-z0-9-_./?])@([A-z0-9_]{1,15})"
   unReplace = "$1<a href=\"/$2\">@$2</a>"
-  htRegex = nre.re"""(*U)(^|[^\w-_.?])([#$])([\w_]*+)(?!</a>|">|#)"""
+  htRegex = re"(^|[^\w-_./?])([#$])([\w_]+)"
   htReplace = "$1<a href=\"/search?q=%23$3\">$2$3</a>"
 proc expandUserEntities(user: var User; raw: RawUser) =
@@ -29,7 +29,7 @@ proc expandUserEntities(user: var User; raw: RawUser) =
   user.bio = orig.replacedWith(replacements, 0 .. orig.len)
     .replacef(unRegex, unReplace)
-    .replace(htRegex, htReplace)
+    .replacef(htRegex, htReplace)
 proc getBanner(user: RawUser): string =
   if user.profileBannerUrl.len > 0:
@@ -76,12 +76,3 @@ proc parseUser*(json: string; username=""): User =
     else: echo "[error - parseUser]: ", error
   result = toUser json.fromJson(RawUser)
-proc parseUsers*(json: string; after=""): Result[User] =
-  result = Result[User](beginning: after.len == 0)
-  # starting with '{' means it's an error
-  if json[0] == '[':
-    let raw = json.fromJson(seq[RawUser])
-    for user in raw:
-      result.content.add user.toUser

View File

@@ -1,12 +1,16 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 import std/[sugar, strutils, times]
-import ../types/common
+import ".."/types/[common, media, tweet]
 import ../../utils as uutils
 template parseTime(time: string; f: static string; flen: int): DateTime =
   if time.len != flen: return
   parse(time, f, utc())
+proc toId*(id: string): int64 =
+  if id.len == 0: 0'i64
+  else: parseBiggestInt(id)
 proc parseIsoDate*(date: string): DateTime =
   date.parseTime("yyyy-MM-dd\'T\'HH:mm:ss\'Z\'", 20)
@@ -16,6 +20,9 @@ proc parseTwitterDate*(date: string): DateTime =
 proc getImageUrl*(url: string): string =
   url.dup(removePrefix(twimg), removePrefix(https))
+proc getImageUrl*(entity: MediaEntity | Entity): string =
+  entity.mediaUrlHttps.getImageUrl
 template handleErrors*(body) =
   if json.startsWith("{\"errors"):
     for error {.inject.} in json.fromJson(Errors).errors:

View File

@@ -1,3 +1,4 @@
+import jsony
 from ../../types import Error
 type
@@ -5,7 +6,7 @@ type
     url*: string
     expandedUrl*: string
     displayUrl*: string
-    indices*: array[2, int]
+    indices*: Slice[int]
   ErrorObj* = object
     code*: Error
@@ -18,3 +19,8 @@ proc contains*(codes: set[Error]; errors: Errors): bool =
   for e in errors.errors:
     if e.code in codes:
       return true
+proc parseHook*(s: string; i: var int; v: var Slice[int]) =
+  var slice: array[2, int]
+  parseHook(s, i, slice)
+  v = slice[0] ..< slice[1]
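A minimal, self-contained sketch of what the parseHook added above does with jsony (the Demo type is made up): a two-element `indices` array becomes a half-open Nim slice.

```nim
import jsony

type Demo = object
  indices: Slice[int]

# Same hook as above: an [a, b) indices array -> Slice[int]
proc parseHook(s: string; i: var int; v: var Slice[int]) =
  var slice: array[2, int]
  parseHook(s, i, slice)
  v = slice[0] ..< slice[1]

let d = """{"indices":[3,10]}""".fromJson(Demo)
echo d.indices.a, " .. ", d.indices.b   # 3 .. 9
```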

View File

@@ -1,4 +1,3 @@
-import options
 import user
 type
@@ -11,5 +10,3 @@ type
   UserResult = object
     legacy*: RawUser
     restId*: string
-    isBlueVerified*: bool
-    reason*: Option[string]

View File

@@ -0,0 +1,15 @@
import options
from ../../types import VideoType, VideoVariant
type
MediaType* = enum
photo, video, animatedGif
MediaEntity* = object
kind*: MediaType
mediaUrlHttps*: string
videoInfo*: Option[VideoInfo]
VideoInfo* = object
durationMillis*: int
variants*: seq[VideoVariant]

View File

@@ -1,13 +1,14 @@
 import std/tables
-import user
+import user, tweet
 type
   Search* = object
     globalObjects*: GlobalObjects
     timeline*: Timeline
-  GlobalObjects = object
+  GlobalObjects* = object
     users*: Table[string, RawUser]
+    tweets*: Table[string, RawTweet]
   Timeline = object
     instructions*: seq[Instructions]
@@ -15,9 +16,13 @@ type
   Instructions = object
     addEntries*: tuple[entries: seq[Entry]]
-  Entry = object
+  Entry* = object
     entryId*: string
     content*: tuple[operation: Operation]
   Operation = object
     cursor*: tuple[value, cursorType: string]
+proc renameHook*(v: var Entity; fieldName: var string) =
+  if fieldName == "type":
+    fieldName = "kind"
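A hedged sketch of the renameHook pattern used here and in the other type modules: jsony rewrites the JSON key "type" to the Nim field name `kind` before matching fields (the Sample object is made up).

```nim
import jsony

type Sample = object
  kind: string

# Redirect the incoming "type" key onto the `kind` field, as above.
proc renameHook(v: var Sample; fieldName: var string) =
  if fieldName == "type":
    fieldName = "kind"

echo """{"type":"video"}""".fromJson(Sample).kind   # video
```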

View File

@@ -0,0 +1,85 @@
import options
import jsony
from json import JsonNode
import user, media, common
type
RawTweet* = object
createdAt*: string
idStr*: string
fullText*: string
displayTextRange*: array[2, int]
entities*: Entities
extendedEntities*: ExtendedEntities
inReplyToStatusIdStr*: string
inReplyToScreenName*: string
userIdStr*: string
isQuoteStatus*: bool
replyCount*: int
retweetCount*: int
favoriteCount*: int
quoteCount*: int
conversationIdStr*: string
favorited*: bool
retweeted*: bool
selfThread*: tuple[idStr: string]
card*: Option[Card]
quotedStatusIdStr*: string
retweetedStatusIdStr*: string
Card* = object
name*: string
url*: string
bindingValues*: JsonNode
Entities* = object
hashtags*: seq[Hashtag]
symbols*: seq[Hashtag]
userMentions*: seq[UserMention]
urls*: seq[Url]
media*: seq[Entity]
Hashtag* = object
indices*: Slice[int]
UserMention* = object
screenName*: string
name*: string
indices*: Slice[int]
ExtendedEntities* = object
media*: seq[Entity]
Entity* = object
kind*: MediaType
indices*: Slice[int]
mediaUrlHttps*: string
url*: string
expandedUrl*: string
videoInfo*: VideoInfo
ext*: Ext
extMediaAvailability*: tuple[status: string]
extAltText*: string
additionalMediaInfo*: AdditionalMediaInfo
sourceStatusIdStr*: string
sourceUserIdStr*: string
AdditionalMediaInfo* = object
sourceUser*: Option[RawUser]
title*: string
description*: string
Ext* = object
mediaStats*: JsonNode
MediaStats* = object
ok*: tuple[viewCount: string]
proc renameHook*(v: var Entity; fieldName: var string) =
if fieldName == "type":
fieldName = "kind"
proc parseHook*(s: string; i: var int; v: var Slice[int]) =
var slice: array[2, int]
parseHook(s, i, slice)
v = slice[0] ..< slice[1]

View File

@@ -1,5 +1,5 @@
 import options, tables
-from ../../types import VideoType, VideoVariant
+import media as mediaTypes
 type
   UnifiedCard* = object
@@ -17,8 +17,6 @@ type
     twitterListDetails
     communityDetails
     mediaWithDetailsHorizontal
-    hidden
-    unknown
   Component* = object
     kind*: ComponentType
@@ -40,25 +38,13 @@ type
     id*: string
     destination*: string
-  Destination* = object
-    kind*: string
-    data*: tuple[urlData: UrlData]
   UrlData* = object
     url*: string
     vanity*: string
-  MediaType* = enum
-    photo, video, model3d
-  MediaEntity* = object
-    kind*: MediaType
-    mediaUrlHttps*: string
-    videoInfo*: Option[VideoInfo]
-  VideoInfo* = object
-    durationMillis*: int
-    variants*: seq[VideoVariant]
+  Destination* = object
+    kind*: string
+    data*: tuple[urlData: UrlData]
   AppType* = enum
     androidApp, iPhoneApp, iPadApp
@@ -72,37 +58,10 @@ type
   Text = object
     content: string
-  TypeField = Component | Destination | MediaEntity | AppStoreData
+  HasTypeField = Component | Destination | MediaEntity | AppStoreData
 converter fromText*(text: Text): string = text.content
-proc renameHook*(v: var TypeField; fieldName: var string) =
+proc renameHook*(v: var HasTypeField; fieldName: var string) =
   if fieldName == "type":
     fieldName = "kind"
-proc enumHook*(s: string; v: var ComponentType) =
-  v = case s
-      of "details": details
-      of "media": media
-      of "swipeable_media": swipeableMedia
-      of "button_group": buttonGroup
-      of "app_store_details": appStoreDetails
-      of "twitter_list_details": twitterListDetails
-      of "community_details": communityDetails
-      of "media_with_details_horizontal": mediaWithDetailsHorizontal
-      of "commerce_drop_details": hidden
-      else: echo "ERROR: Unknown enum value (ComponentType): ", s; unknown
-proc enumHook*(s: string; v: var AppType) =
-  v = case s
-      of "android_app": androidApp
-      of "iphone_app": iPhoneApp
-      of "ipad_app": iPadApp
-      else: echo "ERROR: Unknown enum value (AppType): ", s; androidApp
-proc enumHook*(s: string; v: var MediaType) =
-  v = case s
-      of "video": video
-      of "photo": photo
-      of "model3d": model3d
-      else: echo "ERROR: Unknown enum value (MediaType): ", s; photo

View File

@@ -1,4 +1,5 @@
 import options
+import jsony
 import common
 type
@@ -41,3 +42,8 @@ type
   Color* = object
     red*, green*, blue*: int
+proc parseHook*(s: string; i: var int; v: var Slice[int]) =
+  var slice: array[2, int]
+  parseHook(s, i, slice)
+  v = slice[0] ..< slice[1]

View File

@@ -12,7 +12,8 @@ let
   twRegex = re"(?<=(?<!\S)https:\/\/|(?<=\s))(www\.|mobile\.)?twitter\.com"
   twLinkRegex = re"""<a href="https:\/\/twitter.com([^"]+)">twitter\.com(\S+)</a>"""
-  ytRegex = re(r"([A-z.]+\.)?youtu(be\.com|\.be)", {reStudy, reIgnoreCase})
+  ytRegex = re"([A-z.]+\.)?youtu(be\.com|\.be)"
+  igRegex = re"(www\.)?instagram\.com"
   rdRegex = re"(?<![.b])((www|np|new|amp|old)\.)?reddit.com"
   rdShortRegex = re"(?<![.b])redd\.it\/"
@@ -55,6 +56,8 @@ proc replaceUrls*(body: string; prefs: Prefs; absolute=""): string =
   if prefs.replaceYouTube.len > 0 and "youtu" in result:
     result = result.replace(ytRegex, prefs.replaceYouTube)
+    if prefs.replaceYouTube in result:
+      result = result.replace("/c/", "/")
   if prefs.replaceTwitter.len > 0 and ("twitter.com" in body or tco in body):
     result = result.replace(tco, https & prefs.replaceTwitter & "/t.co")
@@ -69,8 +72,11 @@ proc replaceUrls*(body: string; prefs: Prefs; absolute=""): string =
     if prefs.replaceReddit in result and "/gallery/" in result:
       result = result.replace("/gallery/", "/comments/")
+  if prefs.replaceInstagram.len > 0 and "instagram.com" in result:
+    result = result.replace(igRegex, prefs.replaceInstagram)
   if absolute.len > 0 and "href" in result:
-    result = result.replace("href=\"/", &"href=\"{absolute}/")
+    result = result.replace("href=\"/", "href=\"" & absolute & "/")
 proc getM3u8Url*(content: string): string =
   var matches: array[1, string]
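A small sketch of what the regex substitutions above end up doing inside replaceUrls; std/re is what formatters.nim already uses, and the URLs and replacement hosts are made up.

```nim
import re

let
  ytRegex = re"([A-z.]+\.)?youtu(be\.com|\.be)"
  igRegex = re"(www\.)?instagram\.com"

echo "https://www.youtube.com/watch?v=abc".replace(ytRegex, "piped.kavin.rocks")
echo "https://www.instagram.com/p/xyz/".replace(igRegex, "example.org")
```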

View File

@@ -42,11 +42,5 @@ template use*(pool: HttpPool; heads: HttpHeaders; body: untyped): untyped =
     except ProtocolError:
       # Twitter closed the connection, retry
       body
-    except BadClientError:
-      # Twitter returned 503, we need a new client
-      pool.release(c, true)
-      badClient = false
-      c = pool.acquire(heads)
-      body
     finally:
       pool.release(c, badClient)

View File

@@ -56,7 +56,6 @@ settings:
port = Port(cfg.port) port = Port(cfg.port)
staticDir = cfg.staticDir staticDir = cfg.staticDir
bindAddr = cfg.address bindAddr = cfg.address
reusePort = true
routes: routes:
get "/": get "/":
@@ -85,23 +84,20 @@ routes:
resp Http500, showError( resp Http500, showError(
&"An error occurred, please {link} with the URL you tried to visit.", cfg) &"An error occurred, please {link} with the URL you tried to visit.", cfg)
error BadClientError:
echo error.exc.name, ": ", error.exc.msg
resp Http500, showError("Network error occured, please try again.", cfg)
error RateLimitError: error RateLimitError:
echo error.exc.name, ": ", error.exc.msg
const link = a("another instance", href = instancesUrl) const link = a("another instance", href = instancesUrl)
resp Http429, showError( resp Http429, showError(
&"Instance has been rate limited.<br>Use {link} or try again later.", cfg) &"Instance has been rate limited.<br>Use {link} or try again later.", cfg)
extend rss, "" extend unsupported, ""
extend status, ""
extend search, ""
extend timeline, ""
extend media, ""
extend list, ""
extend preferences, "" extend preferences, ""
extend resolver, "" extend resolver, ""
extend rss, ""
extend search, ""
extend timeline, ""
extend list, ""
extend status, ""
extend media, ""
extend embed, "" extend embed, ""
extend debug, "" extend debug, ""
extend unsupported, ""

View File

@@ -4,8 +4,6 @@ import packedjson, packedjson/deserialiser
import types, parserutils, utils import types, parserutils, utils
import experimental/parser/unifiedcard import experimental/parser/unifiedcard
proc parseGraphTweet(js: JsonNode): Tweet
proc parseUser(js: JsonNode; id=""): User = proc parseUser(js: JsonNode; id=""): User =
if js.isNull: return if js.isNull: return
result = User( result = User(
@@ -21,20 +19,13 @@ proc parseUser(js: JsonNode; id=""): User =
tweets: js{"statuses_count"}.getInt, tweets: js{"statuses_count"}.getInt,
likes: js{"favourites_count"}.getInt, likes: js{"favourites_count"}.getInt,
media: js{"media_count"}.getInt, media: js{"media_count"}.getInt,
verified: js{"verified"}.getBool or js{"ext_is_blue_verified"}.getBool, verified: js{"verified"}.getBool,
protected: js{"protected"}.getBool, protected: js{"protected"}.getBool,
joinDate: js{"created_at"}.getTime joinDate: js{"created_at"}.getTime
) )
result.expandUserEntities(js) result.expandUserEntities(js)
proc parseGraphUser(js: JsonNode): User =
let user = ? js{"user_results", "result"}
result = parseUser(user{"legacy"})
if "is_blue_verified" in user:
result.verified = true
proc parseGraphList*(js: JsonNode): List = proc parseGraphList*(js: JsonNode): List =
if js.isNull: return if js.isNull: return
@@ -47,13 +38,14 @@ proc parseGraphList*(js: JsonNode): List =
result = List( result = List(
id: list{"id_str"}.getStr, id: list{"id_str"}.getStr,
name: list{"name"}.getStr, name: list{"name"}.getStr,
username: list{"user_results", "result", "legacy", "screen_name"}.getStr, username: list{"user", "legacy", "screen_name"}.getStr,
userId: list{"user_results", "result", "rest_id"}.getStr, userId: list{"user", "rest_id"}.getStr,
description: list{"description"}.getStr, description: list{"description"}.getStr,
members: list{"member_count"}.getInt, members: list{"member_count"}.getInt,
banner: list{"custom_banner_media", "media_info", "original_img_url"}.getImageStr banner: list{"custom_banner_media", "media_info", "url"}.getImageStr
) )
proc parsePoll(js: JsonNode): Poll = proc parsePoll(js: JsonNode): Poll =
let vals = js{"binding_values"} let vals = js{"binding_values"}
# name format is pollNchoice_* # name format is pollNchoice_*
@@ -81,11 +73,11 @@ proc parseGif(js: JsonNode): Gif =
proc parseVideo(js: JsonNode): Video = proc parseVideo(js: JsonNode): Video =
result = Video( result = Video(
thumb: js{"media_url_https"}.getImageStr, thumb: js{"media_url_https"}.getImageStr,
views: js{"ext", "mediaStats", "r", "ok", "viewCount"}.getStr($js{"mediaStats", "viewCount"}.getInt), views: js{"ext", "mediaStats", "r", "ok", "viewCount"}.getStr,
available: js{"ext_media_availability", "status"}.getStr.toLowerAscii == "available", available: js{"ext_media_availability", "status"}.getStr == "available",
title: js{"ext_alt_text"}.getStr, title: js{"ext_alt_text"}.getStr,
durationMs: js{"video_info", "duration_millis"}.getInt, durationMs: js{"video_info", "duration_millis"}.getInt
playbackType: m3u8 # playbackType: mp4
) )
with title, js{"additional_media_info", "title"}: with title, js{"additional_media_info", "title"}:
@@ -95,18 +87,10 @@ proc parseVideo(js: JsonNode): Video =
result.description = description.getStr result.description = description.getStr
for v in js{"video_info", "variants"}: for v in js{"video_info", "variants"}:
let
contentType = parseEnum[VideoType](v{"content_type"}.getStr("summary"))
url = v{"url"}.getStr
if contentType == mp4:
result.playbackType = mp4
result.variants.add VideoVariant( result.variants.add VideoVariant(
contentType: contentType, contentType: parseEnum[VideoType](v{"content_type"}.getStr("summary")),
bitrate: v{"bitrate"}.getInt, bitrate: v{"bitrate"}.getInt,
url: url, url: v{"url"}.getStr
resolution: if contentType == mp4: getMp4Resolution(url) else: 0
) )
proc parsePromoVideo(js: JsonNode): Video = proc parsePromoVideo(js: JsonNode): Video =
@@ -197,7 +181,7 @@ proc parseCard(js: JsonNode; urls: JsonNode): Card =
result.url.len == 0 or result.url.startsWith("card://"): result.url.len == 0 or result.url.startsWith("card://"):
result.url = getPicUrl(result.image) result.url = getPicUrl(result.image)
proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet = proc parseTweet(js: JsonNode): Tweet =
if js.isNull: return if js.isNull: return
result = Tweet( result = Tweet(
id: js{"id_str"}.getId, id: js{"id_str"}.getId,
@@ -218,26 +202,14 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
result.expandTweetEntities(js) result.expandTweetEntities(js)
# fix for pinned threads
if result.hasThread and result.threadId == 0:
result.threadId = js{"self_thread", "id_str"}.getId
if js{"is_quote_status"}.getBool: if js{"is_quote_status"}.getBool:
result.quote = some Tweet(id: js{"quoted_status_id_str"}.getId) result.quote = some Tweet(id: js{"quoted_status_id_str"}.getId)
# legacy
with rt, js{"retweeted_status_id_str"}: with rt, js{"retweeted_status_id_str"}:
result.retweet = some Tweet(id: rt.getId) result.retweet = some Tweet(id: rt.getId)
return return
# graphql with jsCard, js{"card"}:
with rt, js{"retweeted_status_result", "result"}:
# needed due to weird edgecase where the actual tweet data isn't included
if "legacy" in rt:
result.retweet = some parseGraphTweet(rt)
return
if jsCard.kind != JNull:
let name = jsCard{"name"}.getStr let name = jsCard{"name"}.getStr
if "poll" in name: if "poll" in name:
if "image" in name: if "image" in name:
@@ -257,10 +229,7 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
of "video": of "video":
result.video = some(parseVideo(m)) result.video = some(parseVideo(m))
with user, m{"additional_media_info", "source_user"}: with user, m{"additional_media_info", "source_user"}:
if user{"id"}.getInt > 0: result.attribution = some(parseUser(user))
result.attribution = some(parseUser(user))
else:
result.attribution = some(parseGraphUser(user))
of "animated_gif": of "animated_gif":
result.gif = some(parseGif(m)) result.gif = some(parseGif(m))
else: discard else: discard
@@ -317,11 +286,70 @@ proc parseGlobalObjects(js: JsonNode): GlobalObjects =
result.users[k] = parseUser(v, k) result.users[k] = parseUser(v, k)
for k, v in tweets: for k, v in tweets:
var tweet = parseTweet(v, v{"card"}) var tweet = parseTweet(v)
if tweet.user.id in result.users: if tweet.user.id in result.users:
tweet.user = result.users[tweet.user.id] tweet.user = result.users[tweet.user.id]
result.tweets[k] = tweet result.tweets[k] = tweet
proc parseThread(js: JsonNode; global: GlobalObjects): tuple[thread: Chain, self: bool] =
result.thread = Chain()
let thread = js{"content", "item", "content", "conversationThread"}
with cursor, thread{"showMoreCursor"}:
result.thread.cursor = cursor{"value"}.getStr
result.thread.hasMore = true
for t in thread{"conversationComponents"}:
let content = t{"conversationTweetComponent", "tweet"}
if content{"displayType"}.getStr == "SelfThread":
result.self = true
var tweet = finalizeTweet(global, content{"id"}.getStr)
if not tweet.available:
tweet.tombstone = getTombstone(content{"tombstone"})
result.thread.content.add tweet
proc parseConversation*(js: JsonNode; tweetId: string): Conversation =
result = Conversation(replies: Result[Chain](beginning: true))
let global = parseGlobalObjects(? js)
let instructions = ? js{"timeline", "instructions"}
if instructions.len == 0:
return
for e in instructions[0]{"addEntries", "entries"}:
let entry = e{"entryId"}.getStr
if "tweet" in entry or "tombstone" in entry:
let tweet = finalizeTweet(global, e.getEntryId)
if $tweet.id != tweetId:
result.before.content.add tweet
else:
result.tweet = tweet
elif "conversationThread" in entry:
let (thread, self) = parseThread(e, global)
if thread.content.len > 0:
if self:
result.after = thread
else:
result.replies.content.add thread
elif "cursor-showMore" in entry:
result.replies.bottom = e.getCursor
elif "cursor-bottom" in entry:
result.replies.bottom = e.getCursor
proc parseStatus*(js: JsonNode): Tweet =
with e, js{"errors"}:
if e.getError == tweetNotFound:
return
result = parseTweet(js)
if not result.isNil:
result.user = parseUser(js{"user"})
with quote, js{"quoted_status"}:
result.quote = some parseStatus(js{"quoted_status"})
proc parseInstructions[T](res: var Result[T]; global: GlobalObjects; js: JsonNode) = proc parseInstructions[T](res: var Result[T]; global: GlobalObjects; js: JsonNode) =
if js.kind != JArray or js.len == 0: if js.kind != JArray or js.len == 0:
return return
@@ -347,12 +375,7 @@ proc parseTimeline*(js: JsonNode; after=""): Timeline =
result.parseInstructions(global, instructions) result.parseInstructions(global, instructions)
var entries: JsonNode for e in instructions[0]{"addEntries", "entries"}:
for i in instructions:
if "addEntries" in i:
entries = i{"addEntries", "entries"}
for e in ? entries:
let entry = e{"entryId"}.getStr let entry = e{"entryId"}.getStr
if "tweet" in entry or entry.startsWith("sq-I-t") or "tombstone" in entry: if "tweet" in entry or entry.startsWith("sq-I-t") or "tombstone" in entry:
let tweet = finalizeTweet(global, e.getEntryId) let tweet = finalizeTweet(global, e.getEntryId)
@@ -362,7 +385,7 @@ proc parseTimeline*(js: JsonNode; after=""): Timeline =
result.top = e.getCursor result.top = e.getCursor
elif "cursor-bottom" in entry: elif "cursor-bottom" in entry:
result.bottom = e.getCursor result.bottom = e.getCursor
elif entry.startsWith("sq-cursor"): elif entry.startsWith("sq-C"):
with cursor, e{"content", "operation", "cursor"}: with cursor, e{"content", "operation", "cursor"}:
if cursor{"cursorType"}.getStr == "Bottom": if cursor{"cursorType"}.getStr == "Bottom":
result.bottom = cursor{"value"}.getStr result.bottom = cursor{"value"}.getStr
@@ -372,7 +395,7 @@ proc parseTimeline*(js: JsonNode; after=""): Timeline =
proc parsePhotoRail*(js: JsonNode): PhotoRail = proc parsePhotoRail*(js: JsonNode): PhotoRail =
for tweet in js: for tweet in js:
let let
t = parseTweet(tweet, js{"card"}) t = parseTweet(tweet)
url = if t.photos.len > 0: t.photos[0] url = if t.photos.len > 0: t.photos[0]
elif t.video.isSome: get(t.video).thumb elif t.video.isSome: get(t.video).thumb
elif t.gif.isSome: get(t.gif).thumb elif t.gif.isSome: get(t.gif).thumb
@@ -381,141 +404,3 @@ proc parsePhotoRail*(js: JsonNode): PhotoRail =
if url.len == 0: continue if url.len == 0: continue
result.add GalleryPhoto(url: url, tweetId: $t.id) result.add GalleryPhoto(url: url, tweetId: $t.id)
proc parseGraphTweet(js: JsonNode): Tweet =
if js.kind == JNull:
return Tweet()
case js{"__typename"}.getStr
of "TweetUnavailable":
return Tweet()
of "TweetTombstone":
return Tweet(text: js{"tombstone", "text"}.getTombstone)
of "TweetPreviewDisplay":
return Tweet(text: "You're unable to view this Tweet because it's only available to the Subscribers of the account owner.")
of "TweetWithVisibilityResults":
return parseGraphTweet(js{"tweet"})
var jsCard = copy(js{"card", "legacy"})
if jsCard.kind != JNull:
var values = newJObject()
for val in jsCard["binding_values"]:
values[val["key"].getStr] = val["value"]
jsCard["binding_values"] = values
result = parseTweet(js{"legacy"}, jsCard)
result.user = parseGraphUser(js{"core"})
with noteTweet, js{"note_tweet", "note_tweet_results", "result"}:
result.expandNoteTweetEntities(noteTweet)
if result.quote.isSome:
result.quote = some(parseGraphTweet(js{"quoted_status_result", "result"}))
proc parseGraphThread(js: JsonNode): tuple[thread: Chain; self: bool] =
let thread = js{"content", "items"}
for t in js{"content", "items"}:
let entryId = t{"entryId"}.getStr
if "cursor-showmore" in entryId:
let cursor = t{"item", "itemContent", "value"}
result.thread.cursor = cursor.getStr
result.thread.hasMore = true
elif "tweet" in entryId:
let tweet = parseGraphTweet(t{"item", "itemContent", "tweet_results", "result"})
result.thread.content.add tweet
if t{"item", "itemContent", "tweetDisplayType"}.getStr == "SelfThread":
result.self = true
proc parseGraphTweetResult*(js: JsonNode): Tweet =
with tweet, js{"data", "tweetResult", "result"}:
result = parseGraphTweet(tweet)
proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
result = Conversation(replies: Result[Chain](beginning: true))
let instructions = ? js{"data", "threaded_conversation_with_injections", "instructions"}
if instructions.len == 0:
return
for e in instructions[0]{"entries"}:
let entryId = e{"entryId"}.getStr
# echo entryId
if entryId.startsWith("tweet"):
with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
let tweet = parseGraphTweet(tweetResult)
if not tweet.available:
tweet.id = parseBiggestInt(entryId.getId())
if $tweet.id == tweetId:
result.tweet = tweet
else:
result.before.content.add tweet
elif entryId.startsWith("tombstone"):
let id = entryId.getId()
let tweet = Tweet(
id: parseBiggestInt(id),
available: false,
text: e{"content", "itemContent", "tombstoneInfo", "richText"}.getTombstone
)
if id == tweetId:
result.tweet = tweet
else:
result.before.content.add tweet
elif entryId.startsWith("conversationthread"):
let (thread, self) = parseGraphThread(e)
if self:
result.after = thread
else:
result.replies.content.add thread
elif entryId.startsWith("cursor-bottom"):
result.replies.bottom = e{"content", "itemContent", "value"}.getStr
proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Timeline =
result = Timeline(beginning: after.len == 0)
let instructions =
if root == "list": ? js{"data", "list", "tweets_timeline", "timeline", "instructions"}
else: ? js{"data", "user", "result", "timeline_v2", "timeline", "instructions"}
if instructions.len == 0:
return
for i in instructions:
if i{"type"}.getStr == "TimelineAddEntries":
for e in i{"entries"}:
let entryId = e{"entryId"}.getStr
if entryId.startsWith("tweet"):
with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
let tweet = parseGraphTweet(tweetResult)
if not tweet.available:
tweet.id = parseBiggestInt(entryId.getId())
result.content.add tweet
elif entryId.startsWith("cursor-bottom"):
result.bottom = e{"content", "value"}.getStr
proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
result = Timeline(beginning: after.len == 0)
let instructions = js{"data", "search_by_raw_query", "search_timeline", "timeline", "instructions"}
if instructions.len == 0:
return
for instruction in instructions:
let typ = instruction{"type"}.getStr
if typ == "TimelineAddEntries":
for e in instructions[0]{"entries"}:
let entryId = e{"entryId"}.getStr
if entryId.startsWith("tweet"):
with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
let tweet = parseGraphTweet(tweetResult)
if not tweet.available:
tweet.id = parseBiggestInt(entryId.getId())
result.content.add tweet
elif entryId.startsWith("cursor-bottom"):
result.bottom = e{"content", "value"}.getStr
elif typ == "TimelineReplaceEntry":
if instruction{"entry_id_to_replace"}.getStr.startsWith("cursor-bottom"):
result.bottom = instruction{"entry", "content", "value"}.getStr

View File

@@ -28,13 +28,13 @@ template `?`*(js: JsonNode): untyped =
if j.isNull: return if j.isNull: return
j j
template with*(ident, value, body): untyped = template `with`*(ident, value, body): untyped =
if true: block:
let ident {.inject.} = value let ident {.inject.} = value
if ident != nil: body if ident != nil: body
template with*(ident; value: JsonNode; body): untyped = template `with`*(ident; value: JsonNode; body): untyped =
if true: block:
let ident {.inject.} = value let ident {.inject.} = value
if value.notNull: body if value.notNull: body
@@ -130,24 +130,9 @@ proc getBanner*(js: JsonNode): string =
return return
proc getTombstone*(js: JsonNode): string = proc getTombstone*(js: JsonNode): string =
result = js{"text"}.getStr result = js{"tombstoneInfo", "richText", "text"}.getStr
result.removeSuffix(" Learn more") result.removeSuffix(" Learn more")
proc getMp4Resolution*(url: string): int =
# parses the height out of a URL like this one:
# https://video.twimg.com/ext_tw_video/<tweet-id>/pu/vid/720x1280/<random>.mp4
const vidSep = "/vid/"
let
vidIdx = url.find(vidSep) + vidSep.len
resIdx = url.find('x', vidIdx) + 1
res = url[resIdx ..< url.find("/", resIdx)]
try:
return parseInt(res)
except ValueError:
# cannot determine resolution (e.g. m3u8/non-mp4 video)
return 0
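For illustration, using the URL shape given in the comment above (the tweet id and file name are invented): the proc returns the second number of the "<width>x<height>" path segment, and falls back to 0 when no such segment parses, e.g. for an m3u8 playlist URL.
doAssert getMp4Resolution(
  "https://video.twimg.com/ext_tw_video/1/pu/vid/720x1280/clip.mp4") == 1280
doAssert getMp4Resolution(
  "https://video.twimg.com/ext_tw_video/1/pu/pl/playlist.m3u8") == 0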
proc extractSlice(js: JsonNode): Slice[int] = proc extractSlice(js: JsonNode): Slice[int] =
result = js["indices"][0].getInt ..< js["indices"][1].getInt result = js["indices"][0].getInt ..< js["indices"][1].getInt
@@ -230,37 +215,47 @@ proc expandUserEntities*(user: var User; js: JsonNode) =
user.bio = user.bio.replacef(unRegex, unReplace) user.bio = user.bio.replacef(unRegex, unReplace)
.replacef(htRegex, htReplace) .replacef(htRegex, htReplace)
proc expandTextEntities(tweet: Tweet; entities: JsonNode; text: string; textSlice: Slice[int]; proc expandTweetEntities*(tweet: Tweet; js: JsonNode) =
replyTo=""; hasQuote=false) = let
let hasCard = tweet.card.isSome orig = tweet.text.toRunes
textRange = js{"display_text_range"}
textSlice = textRange{0}.getInt .. textRange{1}.getInt
hasQuote = js{"is_quote_status"}.getBool
hasCard = tweet.card.isSome
var replyTo = ""
if tweet.replyId != 0:
with reply, js{"in_reply_to_screen_name"}:
tweet.reply.add reply.getStr
replyTo = reply.getStr
let ent = ? js{"entities"}
var replacements = newSeq[ReplaceSlice]() var replacements = newSeq[ReplaceSlice]()
with urls, entities{"urls"}: with urls, ent{"urls"}:
for u in urls: for u in urls:
let urlStr = u["url"].getStr let urlStr = u["url"].getStr
if urlStr.len == 0 or urlStr notin text: if urlStr.len == 0 or urlStr notin tweet.text:
continue continue
replacements.extractUrls(u, textSlice.b, hideTwitter = hasQuote) replacements.extractUrls(u, textSlice.b, hideTwitter = hasQuote)
if hasCard and u{"url"}.getStr == get(tweet.card).url: if hasCard and u{"url"}.getStr == get(tweet.card).url:
get(tweet.card).url = u{"expanded_url"}.getStr get(tweet.card).url = u{"expanded_url"}.getStr
with media, entities{"media"}: with media, ent{"media"}:
for m in media: for m in media:
replacements.extractUrls(m, textSlice.b, hideTwitter = true) replacements.extractUrls(m, textSlice.b, hideTwitter = true)
if "hashtags" in entities: if "hashtags" in ent:
for hashtag in entities["hashtags"]: for hashtag in ent["hashtags"]:
replacements.extractHashtags(hashtag) replacements.extractHashtags(hashtag)
if "symbols" in entities: if "symbols" in ent:
for symbol in entities["symbols"]: for symbol in ent["symbols"]:
replacements.extractHashtags(symbol) replacements.extractHashtags(symbol)
if "user_mentions" in entities: if "user_mentions" in ent:
for mention in entities["user_mentions"]: for mention in ent["user_mentions"]:
let let
name = mention{"screen_name"}.getStr name = mention{"screen_name"}.getStr
slice = mention.extractSlice slice = mention.extractSlice
@@ -277,27 +272,5 @@ proc expandTextEntities(tweet: Tweet; entities: JsonNode; text: string; textSlic
replacements.deduplicate replacements.deduplicate
replacements.sort(cmp) replacements.sort(cmp)
tweet.text = text.toRunes.replacedWith(replacements, textSlice).strip(leading=false) tweet.text = orig.replacedWith(replacements, textSlice)
.strip(leading=false)
proc expandTweetEntities*(tweet: Tweet; js: JsonNode) =
let
entities = ? js{"entities"}
hasQuote = js{"is_quote_status"}.getBool
textRange = js{"display_text_range"}
textSlice = textRange{0}.getInt .. textRange{1}.getInt
var replyTo = ""
if tweet.replyId != 0:
with reply, js{"in_reply_to_screen_name"}:
replyTo = reply.getStr
tweet.reply.add replyTo
tweet.expandTextEntities(entities, tweet.text, textSlice, replyTo, hasQuote)
proc expandNoteTweetEntities*(tweet: Tweet; js: JsonNode) =
let
entities = ? js{"entity_set"}
text = js{"text"}.getStr
textSlice = 0..text.runeLen
tweet.expandTextEntities(entities, text, textSlice)

View File

@@ -80,10 +80,10 @@ genPrefs:
Media: Media:
mp4Playback(checkbox, true): mp4Playback(checkbox, true):
"Enable mp4 video playback" "Enable mp4 video playback (only for gifs)"
hlsPlayback(checkbox, false): hlsPlayback(checkbox, false):
"Enable HLS video streaming (requires JavaScript)" "Enable hls video streaming (requires JavaScript)"
proxyVideos(checkbox, true): proxyVideos(checkbox, true):
"Proxy video streaming through the server (might be slow)" "Proxy video streaming through the server (might be slow)"
@@ -107,6 +107,10 @@ genPrefs:
"Reddit -> Teddit/Libreddit" "Reddit -> Teddit/Libreddit"
placeholder: "Teddit hostname" placeholder: "Teddit hostname"
replaceInstagram(input, ""):
"Instagram -> Bibliogram"
placeholder: "Bibliogram hostname"
iterator allPrefs*(): Pref = iterator allPrefs*(): Pref =
for k, v in prefList: for k, v in prefList:
for pref in v: for pref in v:

View File

@@ -93,11 +93,11 @@ proc genQueryUrl*(query: Query): string =
if query.text.len > 0: if query.text.len > 0:
params.add "q=" & encodeUrl(query.text) params.add "q=" & encodeUrl(query.text)
for f in query.filters: for f in query.filters:
params.add &"f-{f}=on" params.add "f-" & f & "=on"
for e in query.excludes: for e in query.excludes:
params.add &"e-{e}=on" params.add "e-" & e & "=on"
for i in query.includes.filterIt(it != "nativeretweets"): for i in query.includes.filterIt(it != "nativeretweets"):
params.add &"i-{i}=on" params.add "i-" & i & "=on"
if query.since.len > 0: if query.since.len > 0:
params.add "since=" & query.since params.add "since=" & query.since

View File

@@ -118,11 +118,11 @@ proc getUserId*(username: string): Future[string] {.async.} =
pool.withAcquire(r): pool.withAcquire(r):
result = await r.hGet(name.uidKey, name) result = await r.hGet(name.uidKey, name)
if result == redisNil: if result == redisNil:
let user = await getGraphUser(username) let user = await getUser(username)
if user.suspended: if user.suspended:
return "suspended" return "suspended"
else: else:
await all(cacheUserId(name, user.id), cache(user)) await cacheUserId(name, user.id)
return user.id return user.id
proc getCachedUser*(username: string; fetch=true): Future[User] {.async.} = proc getCachedUser*(username: string; fetch=true): Future[User] {.async.} =
@@ -130,7 +130,8 @@ proc getCachedUser*(username: string; fetch=true): Future[User] {.async.} =
if prof != redisNil: if prof != redisNil:
prof.deserialize(User) prof.deserialize(User)
elif fetch: elif fetch:
result = await getGraphUser(username) let userId = await getUserId(username)
result = await getGraphUser(userId)
await cache(result) await cache(result)
proc getCachedUsername*(userId: string): Future[string] {.async.} = proc getCachedUsername*(userId: string): Future[string] {.async.} =
@@ -141,11 +142,9 @@ proc getCachedUsername*(userId: string): Future[string] {.async.} =
if username != redisNil: if username != redisNil:
result = username result = username
else: else:
let user = await getGraphUserById(userId) let user = await getUserById(userId)
result = user.username result = user.username
await setEx(key, baseCacheTime, result) await setEx(key, baseCacheTime, result)
if result.len > 0 and user.id.len > 0:
await all(cacheUserId(result, user.id), cache(user))
proc getCachedTweet*(id: int64): Future[Tweet] {.async.} = proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
if id == 0: return if id == 0: return
@@ -153,8 +152,8 @@ proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
if tweet != redisNil: if tweet != redisNil:
tweet.deserialize(Tweet) tweet.deserialize(Tweet)
else: else:
result = await getGraphTweetResult($id) result = await getStatus($id)
if not result.isNil: if result.isNil:
await cache(result) await cache(result)
proc getCachedPhotoRail*(name: string): Future[PhotoRail] {.async.} = proc getCachedPhotoRail*(name: string): Future[PhotoRail] {.async.} =

View File

@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, strutils, strformat, options import asyncdispatch, strutils, options
import jester, karax/vdom import jester, karax/vdom
import ".."/[types, api] import ".."/[types, api]
import ../views/[embed, tweet, general] import ../views/[embed, tweet, general]
@@ -25,12 +25,12 @@ proc createEmbedRouter*(cfg: Config) =
if convo == nil or convo.tweet == nil: if convo == nil or convo.tweet == nil:
resp Http404 resp Http404
resp renderTweetEmbed(convo.tweet, path, prefs, cfg, request) resp $renderTweetEmbed(convo.tweet, path, prefs, cfg, request)
get "/embed/Tweet.html": get "/embed/Tweet.html":
let id = @"id" let id = @"id"
if id.len > 0: if id.len > 0:
redirect(&"/i/status/{id}/embed") redirect("/i/status/" & id & "/embed")
else: else:
resp Http404 resp Http404

View File

@@ -1,25 +1,23 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import strutils, strformat, uri import strutils, uri
import jester import jester
import router_utils import router_utils
import ".."/[types, redis_cache, api] import ".."/[types, redis_cache, api]
import ../views/[general, timeline, list] import ../views/[general, timeline, list]
export getListTimeline, getGraphList
template respList*(list, timeline, title, vnode: typed) = template respList*(list, timeline, title, vnode: typed) =
if list.id.len == 0 or list.name.len == 0: if list.id.len == 0 or list.name.len == 0:
resp Http404, showError(&"""List "{@"id"}" not found""", cfg) resp Http404, showError("List " & @"id" & " not found", cfg)
let let
html = renderList(vnode, timeline.query, list) html = renderList(vnode, timeline.query, list)
rss = &"""/i/lists/{@"id"}/rss""" rss = "/i/lists/$1/rss" % [@"id"]
resp renderMain(html, request, cfg, prefs, titleText=title, rss=rss, banner=list.banner) resp renderMain(html, request, cfg, prefs, titleText=title, rss=rss, banner=list.banner)
proc title*(list: List): string =
&"@{list.username}/{list.name}"
proc createListRouter*(cfg: Config) = proc createListRouter*(cfg: Config) =
router list: router list:
get "/@name/lists/@slug/?": get "/@name/lists/@slug/?":
@@ -30,22 +28,24 @@ proc createListRouter*(cfg: Config) =
slug = decodeUrl(@"slug") slug = decodeUrl(@"slug")
list = await getCachedList(@"name", slug) list = await getCachedList(@"name", slug)
if list.id.len == 0: if list.id.len == 0:
resp Http404, showError(&"""List "{@"slug"}" not found""", cfg) resp Http404, showError("List \"" & @"slug" & "\" not found", cfg)
redirect(&"/i/lists/{list.id}") redirect("/i/lists/" & list.id)
get "/i/lists/@id/?": get "/i/lists/@id/?":
cond '.' notin @"id" cond '.' notin @"id"
let let
prefs = cookiePrefs() prefs = cookiePrefs()
list = await getCachedList(id=(@"id")) list = await getCachedList(id=(@"id"))
timeline = await getGraphListTweets(list.id, getCursor()) title = "@" & list.username & "/" & list.name
timeline = await getListTimeline(list.id, getCursor())
vnode = renderTimelineTweets(timeline, prefs, request.path) vnode = renderTimelineTweets(timeline, prefs, request.path)
respList(list, timeline, list.title, vnode) respList(list, timeline, title, vnode)
get "/i/lists/@id/members": get "/i/lists/@id/members":
cond '.' notin @"id" cond '.' notin @"id"
let let
prefs = cookiePrefs() prefs = cookiePrefs()
list = await getCachedList(id=(@"id")) list = await getCachedList(id=(@"id"))
title = "@" & list.username & "/" & list.name
members = await getGraphListMembers(list, getCursor()) members = await getGraphListMembers(list, getCursor())
respList(list, members, list.title, renderTimelineUsers(members, prefs, request.path)) respList(list, members, title, renderTimelineUsers(members, prefs, request.path))

View File

@@ -12,8 +12,7 @@ export httpclient, os, strutils, asyncstreams, base64, re
const const
m3u8Mime* = "application/vnd.apple.mpegurl" m3u8Mime* = "application/vnd.apple.mpegurl"
mp4Mime* = "video/mp4" maxAge* = "max-age=604800"
maxAge* = "public, max-age=604800, must-revalidate"
proc safeFetch*(url: string): Future[string] {.async.} = proc safeFetch*(url: string): Future[string] {.async.} =
let client = newAsyncHttpClient() let client = newAsyncHttpClient()
@@ -21,84 +20,56 @@ proc safeFetch*(url: string): Future[string] {.async.} =
except: discard except: discard
finally: client.close() finally: client.close()
template respond*(req: asynchttpserver.Request; code: HttpCode; template respond*(req: asynchttpserver.Request; headers) =
headers: seq[(string, string)]) = var msg = "HTTP/1.1 200 OK\c\L"
var msg = "HTTP/1.1 " & $code & "\c\L" for k, v in headers:
for (k, v) in headers:
msg.add(k & ": " & v & "\c\L") msg.add(k & ": " & v & "\c\L")
msg.add "\c\L" msg.add "\c\L"
yield req.client.send(msg, flags={}) yield req.client.send(msg)
proc getContentLength(res: AsyncResponse): string =
result = "0"
if res.headers.hasKey("content-length"):
result = $res.contentLength
elif res.headers.hasKey("content-range"):
result = res.headers["content-range"]
result = result[result.find('/') + 1 .. ^1]
if result == "*":
result.setLen(0)
proc proxyMedia*(req: jester.Request; url: string): Future[HttpCode] {.async.} = proc proxyMedia*(req: jester.Request; url: string): Future[HttpCode] {.async.} =
result = Http200 result = Http200
let let
request = req.getNativeReq() request = req.getNativeReq()
hashed = $hash(url) client = newAsyncHttpClient()
if request.headers.getOrDefault("If-None-Match") == hashed:
return Http304
let c = newAsyncHttpClient(headers=newHttpHeaders({
"accept": "*/*",
"range": $req.headers.getOrDefault("range")
}))
try: try:
var res = await c.get(url) let res = await client.get(url)
if not res.status.startsWith("20"): if res.status != "200 OK":
return Http404 return Http404
var headers = @{ let hashed = $hash(url)
"accept-ranges": "bytes", if request.headers.getOrDefault("If-None-Match") == hashed:
"content-type": $res.headers.getOrDefault("content-type"), return Http304
"cache-control": maxAge,
"age": $res.headers.getOrDefault("age"),
"date": $res.headers.getOrDefault("date"),
"last-modified": $res.headers.getOrDefault("last-modified")
}
var tries = 0 let contentLength =
while tries <= 10 and res.headers.hasKey("transfer-encoding"): if res.headers.hasKey("content-length"):
await sleepAsync(100 + tries * 200) res.headers["content-length", 0]
res = await c.get(url) else:
tries.inc ""
let contentLength = res.getContentLength let headers = newHttpHeaders({
if contentLength.len > 0: "Content-Type": res.headers["content-type", 0],
headers.add ("content-length", contentLength) "Content-Length": contentLength,
"Cache-Control": maxAge,
"ETag": hashed
})
if res.headers.hasKey("content-range"): respond(request, headers)
headers.add ("content-range", $res.headers.getOrDefault("content-range"))
respond(request, Http206, headers)
else:
respond(request, Http200, headers)
var (hasValue, data) = (true, "") var (hasValue, data) = (true, "")
while hasValue: while hasValue:
(hasValue, data) = await res.bodyStream.read() (hasValue, data) = await res.bodyStream.read()
if hasValue: if hasValue:
await request.client.send(data, flags={}) await request.client.send(data)
data.setLen 0 data.setLen 0
except OSError: discard except HttpRequestError, ProtocolError, OSError:
except ProtocolError, HttpRequestError:
result = Http404 result = Http404
finally: finally:
c.close() client.close()
template check*(c): untyped = template check*(code): untyped =
let code = c
if code != Http200: if code != Http200:
resp code resp code
else: else:
@@ -112,27 +83,23 @@ proc decoded*(req: jester.Request; index: int): string =
if based: decode(encoded) if based: decode(encoded)
else: decodeUrl(encoded) else: decodeUrl(encoded)
proc getPicUrl*(req: jester.Request): string =
result = decoded(req, 1)
if "twimg.com" notin result:
result.insert(twimg)
if not result.startsWith(https):
result.insert(https)
proc createMediaRouter*(cfg: Config) = proc createMediaRouter*(cfg: Config) =
router media: router media:
get "/pic/?": get "/pic/?":
resp Http404 resp Http404
get re"^\/pic\/orig\/(enc)?\/?(.+)":
let url = getPicUrl(request)
cond isTwitterUrl(parseUri(url)) == true
check await proxyMedia(request, url & "?name=orig")
get re"^\/pic\/(enc)?\/?(.+)": get re"^\/pic\/(enc)?\/?(.+)":
let url = getPicUrl(request) var url = decoded(request, 1)
cond isTwitterUrl(parseUri(url)) == true if "twimg.com" notin url:
check await proxyMedia(request, url) url.insert(twimg)
if not url.startsWith(https):
url.insert(https)
let uri = parseUri(url)
cond isTwitterUrl(uri) == true
let code = await proxyMedia(request, url)
check code
get re"^\/video\/(enc)?\/?(.+)\/(.+)$": get re"^\/video\/(enc)?\/?(.+)\/(.+)$":
let url = decoded(request, 2) let url = decoded(request, 2)
@@ -142,7 +109,8 @@ proc createMediaRouter*(cfg: Config) =
resp showError("Failed to verify signature", cfg) resp showError("Failed to verify signature", cfg)
if ".mp4" in url or ".ts" in url or ".m4s" in url: if ".mp4" in url or ".ts" in url or ".m4s" in url:
check await proxyMedia(request, url) let code = await proxyMedia(request, url)
check code
var content: string var content: string
if ".vmap" in url: if ".vmap" in url:

View File

@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, tables, times, hashes, uri import asyncdispatch, strutils, tables, times, hashes, uri
import jester import jester
@@ -10,11 +10,6 @@ include "../views/rss.nimf"
export times, hashes export times, hashes
proc redisKey*(page, name, cursor: string): string =
result = page & ":" & name
if cursor.len > 0:
result &= ":" & cursor
proc timelineRss*(req: Request; cfg: Config; query: Query): Future[Rss] {.async.} = proc timelineRss*(req: Request; cfg: Config; query: Query): Future[Rss] {.async.} =
var profile: Profile var profile: Profile
let let
@@ -28,7 +23,7 @@ proc timelineRss*(req: Request; cfg: Config; query: Query): Future[Rss] {.async.
var q = query var q = query
q.fromUser = names q.fromUser = names
profile = Profile( profile = Profile(
tweets: await getGraphSearch(q, after), tweets: await getSearch[Tweet](q, after),
# this is kinda dumb # this is kinda dumb
user: User( user: User(
username: name, username: name,
@@ -47,8 +42,8 @@ proc timelineRss*(req: Request; cfg: Config; query: Query): Future[Rss] {.async.
template respRss*(rss, page) = template respRss*(rss, page) =
if rss.cursor.len == 0: if rss.cursor.len == 0:
let info = case page let info = case page
of "User": " \"" & @"name" & "\" " of "User": " \"$1\" " % @"name"
of "List": " \"" & @"id" & "\" " of "List": " $1 " % @"id"
else: " " else: " "
resp Http404, showError(page & info & "not found", cfg) resp Http404, showError(page & info & "not found", cfg)
@@ -72,13 +67,13 @@ proc createRssRouter*(cfg: Config) =
let let
cursor = getCursor() cursor = getCursor()
key = redisKey("search", $hash(genQueryUrl(query)), cursor) key = "search:" & $hash(genQueryUrl(query)) & ":" & cursor
var rss = await getCachedRss(key) var rss = await getCachedRss(key)
if rss.cursor.len > 0: if rss.cursor.len > 0:
respRss(rss, "Search") respRss(rss, "Search")
let tweets = await getGraphSearch(query, cursor) let tweets = await getSearch[Tweet](query, cursor)
rss.cursor = tweets.bottom rss.cursor = tweets.bottom
rss.feed = renderSearchRss(tweets.content, query.text, genQueryUrl(query), cfg) rss.feed = renderSearchRss(tweets.content, query.text, genQueryUrl(query), cfg)
@@ -89,8 +84,9 @@ proc createRssRouter*(cfg: Config) =
cond cfg.enableRss cond cfg.enableRss
cond '.' notin @"name" cond '.' notin @"name"
let let
cursor = getCursor()
name = @"name" name = @"name"
key = redisKey("twitter", name, getCursor()) key = "twitter:" & name & ":" & cursor
var rss = await getCachedRss(key) var rss = await getCachedRss(key)
if rss.cursor.len > 0: if rss.cursor.len > 0:
@@ -105,20 +101,18 @@ proc createRssRouter*(cfg: Config) =
cond cfg.enableRss cond cfg.enableRss
cond '.' notin @"name" cond '.' notin @"name"
cond @"tab" in ["with_replies", "media", "search"] cond @"tab" in ["with_replies", "media", "search"]
let let name = @"name"
name = @"name" let query =
tab = @"tab" case @"tab"
query = of "with_replies": getReplyQuery(name)
case tab of "media": getMediaQuery(name)
of "with_replies": getReplyQuery(name) of "search": initQuery(params(request), name=name)
of "media": getMediaQuery(name) else: Query(fromUser: @[name])
of "search": initQuery(params(request), name=name)
else: Query(fromUser: @[name])
let searchKey = if tab != "search": "" var key = @"tab" & ":" & @"name" & ":"
else: ":" & $hash(genQueryUrl(query)) if @"tab" == "search":
key &= $hash(genQueryUrl(query)) & ":"
let key = redisKey(tab, name & searchKey, getCursor()) key &= getCursor()
var rss = await getCachedRss(key) var rss = await getCachedRss(key)
if rss.cursor.len > 0: if rss.cursor.len > 0:
@@ -149,17 +143,18 @@ proc createRssRouter*(cfg: Config) =
get "/i/lists/@id/rss": get "/i/lists/@id/rss":
cond cfg.enableRss cond cfg.enableRss
let let
id = @"id"
cursor = getCursor() cursor = getCursor()
key = redisKey("lists", id, cursor) key =
if cursor.len == 0: "lists:" & @"id"
else: "lists:" & @"id" & ":" & cursor
var rss = await getCachedRss(key) var rss = await getCachedRss(key)
if rss.cursor.len > 0: if rss.cursor.len > 0:
respRss(rss, "List") respRss(rss, "List")
let let
list = await getCachedList(id=id) list = await getCachedList(id=(@"id"))
timeline = await getGraphListTweets(list.id, cursor) timeline = await getListTimeline(list.id, cursor)
rss.cursor = timeline.bottom rss.cursor = timeline.bottom
rss.feed = renderListRss(timeline.content, list, cfg) rss.feed = renderListRss(timeline.content, list, cfg)

View File

@@ -14,31 +14,25 @@ export search
proc createSearchRouter*(cfg: Config) = proc createSearchRouter*(cfg: Config) =
router search: router search:
get "/search/?": get "/search/?":
let q = @"q" if @"q".len > 500:
if q.len > 500:
resp Http400, showError("Search input too long.", cfg) resp Http400, showError("Search input too long.", cfg)
let let
prefs = cookiePrefs() prefs = cookiePrefs()
query = initQuery(params(request)) query = initQuery(params(request))
title = "Search" & (if q.len > 0: " (" & q & ")" else: "")
case query.kind case query.kind
of users: of users:
if "," in q: if "," in @"q":
redirect("/" & q) redirect("/" & @"q")
var users: Result[User] let users = await getSearch[User](query, getCursor())
try: resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs)
users = await getUserSearch(query, getCursor())
except InternalError:
users = Result[User](beginning: true, query: query)
resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs, title)
of tweets: of tweets:
let let
tweets = await getGraphSearch(query, getCursor()) tweets = await getSearch[Tweet](query, getCursor())
rss = "/search/rss?" & genQueryUrl(query) rss = "/search/rss?" & genQueryUrl(query)
resp renderMain(renderTweetSearch(tweets, prefs, getPath()), resp renderMain(renderTweetSearch(tweets, prefs, getPath()),
request, cfg, prefs, title, rss=rss) request, cfg, prefs, rss=rss)
else: else:
resp Http404, showError("Invalid search", cfg) resp Http404, showError("Invalid search", cfg)
@@ -48,4 +42,4 @@ proc createSearchRouter*(cfg: Config) =
get "/opensearch": get "/opensearch":
let url = getUrlPrefix(cfg) & "/search?q=" let url = getUrlPrefix(cfg) & "/search?q="
resp Http200, {"Content-Type": "application/opensearchdescription+xml"}, resp Http200, {"Content-Type": "application/opensearchdescription+xml"},
generateOpenSearchXML(cfg.title, cfg.hostname, url) generateOpenSearchXML(cfg.title, cfg.hostname, url)

View File

@@ -16,21 +16,17 @@ proc createStatusRouter*(cfg: Config) =
router status: router status:
get "/@name/status/@id/?": get "/@name/status/@id/?":
cond '.' notin @"name" cond '.' notin @"name"
let id = @"id" cond not @"id".any(c => not c.isDigit)
if id.len > 19 or id.any(c => not c.isDigit):
resp Http404, showError("Invalid tweet ID", cfg)
let prefs = cookiePrefs() let prefs = cookiePrefs()
# used for the infinite scroll feature # used for the infinite scroll feature
if @"scroll".len > 0: if @"scroll".len > 0:
let replies = await getReplies(id, getCursor()) let replies = await getReplies(@"id", getCursor())
if replies.content.len == 0: if replies.content.len == 0:
resp Http404, "" resp Http404, ""
resp $renderReplies(replies, prefs, getPath()) resp $renderReplies(replies, prefs, getPath())
let conv = await getTweet(id, getCursor()) let conv = await getTweet(@"id", getCursor())
if conv == nil: if conv == nil:
echo "nil conv" echo "nil conv"
@@ -76,6 +72,3 @@ proc createStatusRouter*(cfg: Config) =
get "/i/web/status/@id": get "/i/web/status/@id":
redirect("/i/status/" & @"id") redirect("/i/status/" & @"id")
get "/@name/thread/@id/?":
redirect("/$1/status/$2" % [@"name", @"id"])

View File

@@ -47,10 +47,10 @@ proc fetchProfile*(after: string; query: Query; skipRail=false;
let let
timeline = timeline =
case query.kind case query.kind
of posts: getGraphUserTweets(userId, TimelineKind.tweets, after) of posts: getTimeline(userId, after)
of replies: getGraphUserTweets(userId, TimelineKind.replies, after) of replies: getTimeline(userId, after, replies=true)
of media: getGraphUserTweets(userId, TimelineKind.media, after) of media: getMediaTimeline(userId, after)
else: getGraphSearch(query, after) else: getSearch[Tweet](query, after)
rail = rail =
skipIf(skipRail or query.kind == media, @[]): skipIf(skipRail or query.kind == media, @[]):
@@ -64,7 +64,6 @@ proc fetchProfile*(after: string; query: Query; skipRail=false;
let tweet = await getCachedTweet(user.pinnedTweet) let tweet = await getCachedTweet(user.pinnedTweet)
if not tweet.isNil: if not tweet.isNil:
tweet.pinned = true tweet.pinned = true
tweet.user = user
pinned = some tweet pinned = some tweet
result = Profile( result = Profile(
@@ -83,7 +82,7 @@ proc showTimeline*(request: Request; query: Query; cfg: Config; prefs: Prefs;
rss, after: string): Future[string] {.async.} = rss, after: string): Future[string] {.async.} =
if query.fromUser.len != 1: if query.fromUser.len != 1:
let let
timeline = await getGraphSearch(query, after) timeline = await getSearch[Tweet](query, after)
html = renderTweetSearch(timeline, prefs, getPath()) html = renderTweetSearch(timeline, prefs, getPath())
return renderMain(html, request, cfg, prefs, "Multi", rss=rss) return renderMain(html, request, cfg, prefs, "Multi", rss=rss)
@@ -124,7 +123,7 @@ proc createTimelineRouter*(cfg: Config) =
get "/@name/?@tab?/?": get "/@name/?@tab?/?":
cond '.' notin @"name" cond '.' notin @"name"
cond @"name" notin ["pic", "gif", "video", "search", "settings", "login", "intent", "i"] cond @"name" notin ["pic", "gif", "video"]
cond @"tab" in ["with_replies", "media", "search", ""] cond @"tab" in ["with_replies", "media", "search", ""]
let let
prefs = cookiePrefs() prefs = cookiePrefs()
@@ -138,7 +137,7 @@ proc createTimelineRouter*(cfg: Config) =
# used for the infinite scroll feature # used for the infinite scroll feature
if @"scroll".len > 0: if @"scroll".len > 0:
if query.fromUser.len != 1: if query.fromUser.len != 1:
var timeline = await getGraphSearch(query, after) var timeline = await getSearch[Tweet](query, after)
if timeline.content.len == 0: resp Http404 if timeline.content.len == 0: resp Http404
timeline.beginning = true timeline.beginning = true
resp $renderTweetSearch(timeline, prefs, getPath()) resp $renderTweetSearch(timeline, prefs, getPath())

View File

@@ -73,9 +73,9 @@
} }
} }
.profile-joindate, .profile-location, .profile-website { .profile-joindate, .profile-location, profile-website {
color: var(--fg_faded); color: var(--fg_faded);
margin: 1px 0; margin: 2px 0;
width: 100%; width: 100%;
} }
} }

View File

@@ -98,9 +98,10 @@
} }
.avatar { .avatar {
position: absolute;
&.round { &.round {
border-radius: 50%; border-radius: 50%;
-webkit-user-select: none;
} }
&.mini { &.mini {
@@ -120,22 +121,14 @@
background-color: var(--bg_panel); background-color: var(--bg_panel);
.tweet-content { .tweet-content {
font-size: 18px; font-size: 18px
} }
.tweet-body { .tweet-body {
display: flex; display: flex;
flex-direction: column; flex-direction: column;
max-height: calc(100vh - 0.75em * 2); max-height: calc(100vh - 0.75em * 2);
} }
.card-image img {
height: auto;
}
.avatar {
position: absolute;
}
} }
.attribution { .attribution {
@@ -200,7 +193,6 @@
.tweet-stats { .tweet-stats {
margin-bottom: -3px; margin-bottom: -3px;
-webkit-user-select: none;
} }
.tweet-stat { .tweet-stat {
@@ -232,7 +224,6 @@
left: 0; left: 0;
top: 0; top: 0;
position: absolute; position: absolute;
-webkit-user-select: none;
&:hover { &:hover {
background-color: var(--bg_hover); background-color: var(--bg_hover);

View File

@@ -23,6 +23,7 @@
font-size: 18px; font-size: 18px;
} }
@media(max-width: 600px) { @media(max-width: 600px) {
.main-tweet .tweet-content { .main-tweet .tweet-content {
font-size: 16px; font-size: 16px;

View File

@@ -3,7 +3,7 @@
video { video {
max-height: 100%; max-height: 100%;
width: 100%; max-width: 100%;
} }
.gallery-video { .gallery-video {

View File

@@ -1,7 +1,8 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, httpclient, times, sequtils, json, random import asyncdispatch, httpclient, times, sequtils, json, random
import strutils, tables import strutils, tables
import types, consts import zippy
import types, consts, http_pool
const const
maxConcurrentReqs = 5 # max requests at a time per token, to avoid race conditions maxConcurrentReqs = 5 # max requests at a time per token, to avoid race conditions
@@ -10,14 +11,9 @@ const
failDelay = initDuration(minutes=30) failDelay = initDuration(minutes=30)
var var
clientPool: HttpPool
tokenPool: seq[Token] tokenPool: seq[Token]
lastFailed: Time lastFailed: Time
enableLogging = false
let headers = newHttpHeaders({"authorization": auth})
template log(str) =
if enableLogging: echo "[tokens] ", str
proc getPoolJson*(): JsonNode = proc getPoolJson*(): JsonNode =
var var
@@ -41,12 +37,9 @@ proc getPoolJson*(): JsonNode =
let let
maxReqs = maxReqs =
case api case api
of Api.listMembers, Api.listBySlug, Api.list, Api.userRestId: 500
of Api.timeline: 187 of Api.timeline: 187
of Api.listMembers, Api.listBySlug, Api.list, Api.listTweets, else: 180
Api.userTweets, Api.userTweetsAndReplies, Api.userMedia,
Api.userRestId, Api.userScreenName,
Api.tweetDetail, Api.tweetResult, Api.search: 500
of Api.userSearch: 900
reqs = maxReqs - token.apis[api].remaining reqs = maxReqs - token.apis[api].remaining
reqsPerApi[$api] = reqsPerApi.getOrDefault($api, 0) + reqs reqsPerApi[$api] = reqsPerApi.getOrDefault($api, 0) + reqs
@@ -67,23 +60,25 @@ proc fetchToken(): Future[Token] {.async.} =
if getTime() - lastFailed < failDelay: if getTime() - lastFailed < failDelay:
raise rateLimitError() raise rateLimitError()
let client = newAsyncHttpClient(headers=headers) let headers = newHttpHeaders({
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"accept-encoding": "gzip",
"accept-language": "en-US,en;q=0.5",
"connection": "keep-alive",
"authorization": auth
})
try: try:
let let
resp = await client.postContent(activate) resp = clientPool.use(headers): await c.postContent(activate)
tokNode = parseJson(resp)["guest_token"] tokNode = parseJson(uncompress(resp))["guest_token"]
tok = tokNode.getStr($(tokNode.getInt)) tok = tokNode.getStr($(tokNode.getInt))
time = getTime() time = getTime()
return Token(tok: tok, init: time, lastUse: time) return Token(tok: tok, init: time, lastUse: time)
except Exception as e: except Exception as e:
echo "[tokens] fetching token failed: ", e.msg lastFailed = getTime()
if "Try again" notin e.msg: echo "fetching token failed: ", e.msg
echo "[tokens] fetching tokens paused, resuming in 30 minutes"
lastFailed = getTime()
finally:
client.close()
proc expired(token: Token): bool = proc expired(token: Token): bool =
let time = getTime() let time = getTime()
@@ -105,9 +100,6 @@ proc isReady(token: Token; api: Api): bool =
proc release*(token: Token; used=false; invalid=false) = proc release*(token: Token; used=false; invalid=false) =
if token.isNil: return if token.isNil: return
if invalid or token.expired: if invalid or token.expired:
if invalid: log "discarding invalid token"
elif token.expired: log "discarding expired token"
let idx = tokenPool.find(token) let idx = tokenPool.find(token)
if idx > -1: tokenPool.delete(idx) if idx > -1: tokenPool.delete(idx)
elif used: elif used:
@@ -123,7 +115,6 @@ proc getToken*(api: Api): Future[Token] {.async.} =
if not result.isReady(api): if not result.isReady(api):
release(result) release(result)
result = await fetchToken() result = await fetchToken()
log "added new token to pool"
tokenPool.add result tokenPool.add result
if not result.isNil: if not result.isNil:
@@ -152,11 +143,10 @@ proc poolTokens*(amount: int) {.async.} =
except: discard except: discard
if not newToken.isNil: if not newToken.isNil:
log "added new token to pool"
tokenPool.add newToken tokenPool.add newToken
proc initTokenPool*(cfg: Config) {.async.} = proc initTokenPool*(cfg: Config) {.async.} =
enableLogging = cfg.enableDebug clientPool = HttpPool()
while true: while true:
if tokenPool.countIt(not it.isLimited(Api.timeline)) < cfg.minTokens: if tokenPool.countIt(not it.isLimited(Api.timeline)) < cfg.minTokens:

View File

@@ -7,28 +7,17 @@ genPrefsType()
type type
RateLimitError* = object of CatchableError RateLimitError* = object of CatchableError
InternalError* = object of CatchableError InternalError* = object of CatchableError
BadClientError* = object of CatchableError
TimelineKind* {.pure.} = enum
tweets
replies
media
Api* {.pure.} = enum Api* {.pure.} = enum
tweetDetail userShow
tweetResult
timeline timeline
search search
userSearch tweet
list list
listBySlug listBySlug
listMembers listMembers
listTweets
userRestId userRestId
userScreenName status
userTweets
userTweetsAndReplies
userMedia
RateLimit* = object RateLimit* = object
remaining*: int remaining*: int
@@ -45,22 +34,17 @@ type
null = 0 null = 0
noUserMatches = 17 noUserMatches = 17
protectedUser = 22 protectedUser = 22
missingParams = 25
couldntAuth = 32 couldntAuth = 32
doesntExist = 34 doesntExist = 34
invalidParam = 47
userNotFound = 50 userNotFound = 50
suspended = 63 suspended = 63
rateLimited = 88 rateLimited = 88
invalidToken = 89 invalidToken = 89
listIdOrSlug = 112 listIdOrSlug = 112
tweetNotFound = 144 tweetNotFound = 144
tweetNotAuthorized = 179
forbidden = 200 forbidden = 200
badToken = 239 badToken = 239
noCsrf = 353 noCsrf = 353
tweetUnavailable = 421
tweetCensored = 422
User* = object User* = object
id*: string id*: string
@@ -91,7 +75,6 @@ type
contentType*: VideoType contentType*: VideoType
url*: string url*: string
bitrate*: int bitrate*: int
resolution*: int
Video* = object Video* = object
durationMs*: int durationMs*: int
@@ -161,7 +144,6 @@ type
imageDirectMessage = "image_direct_message" imageDirectMessage = "image_direct_message"
audiospace = "audiospace" audiospace = "audiospace"
newsletterPublication = "newsletter_publication" newsletterPublication = "newsletter_publication"
hidden
unknown unknown
Card* = object Card* = object
@@ -192,8 +174,6 @@ type
available*: bool available*: bool
tombstone*: string tombstone*: string
location*: string location*: string
# Unused, needed for backwards compat
source*: string
stats*: TweetStats stats*: TweetStats
retweet*: Option[Tweet] retweet*: Option[Tweet]
attribution*: Option[User] attribution*: Option[User]

View File

@@ -1,7 +1,6 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
import strutils, strformat, uri, tables, base64 import strutils, strformat, uri, tables, base64
import nimcrypto import nimcrypto
import types
var var
hmacKey: string hmacKey: string
@@ -29,20 +28,6 @@ proc setProxyEncoding*(state: bool) =
proc getHmac*(data: string): string = proc getHmac*(data: string): string =
($hmac(sha256, hmacKey, data))[0 .. 12] ($hmac(sha256, hmacKey, data))[0 .. 12]
proc getBestMp4VidVariant(video: Video): VideoVariant =
for v in video.variants:
if v.bitrate >= result.bitrate:
result = v
proc getVidVariant*(video: Video; playbackType: VideoType): VideoVariant =
case playbackType
of mp4:
return video.getBestMp4VidVariant
of m3u8, vmap:
for variant in video.variants:
if variant.contentType == playbackType:
return variant
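A rough sketch of how these selectors behave, assuming Video exposes the variants seq of VideoVariant shown in types.nim above; the bitrates and URLs are invented:
let video = Video(variants: @[
  VideoVariant(contentType: m3u8, url: "pl.m3u8", bitrate: 0),
  VideoVariant(contentType: mp4, url: "lo.mp4", bitrate: 632_000),
  VideoVariant(contentType: mp4, url: "hi.mp4", bitrate: 2_176_000)
])
# mp4 playback takes the highest-bitrate variant; HLS/vmap take the first
# variant with a matching content type.
doAssert video.getVidVariant(mp4).url == "hi.mp4"
doAssert video.getVidVariant(m3u8).url == "pl.m3u8"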
proc getVidUrl*(link: string): string = proc getVidUrl*(link: string): string =
if link.len == 0: return if link.len == 0: return
let sig = getHmac(link) let sig = getHmac(link)
@@ -57,12 +42,6 @@ proc getPicUrl*(link: string): string =
else: else:
&"/pic/{encodeUrl(link)}" &"/pic/{encodeUrl(link)}"
proc getOrigPicUrl*(link: string): string =
if base64Media:
&"/pic/orig/enc/{encode(link, safe=true)}"
else:
&"/pic/orig/{encodeUrl(link)}"
proc filterParams*(params: Table): seq[(string, string)] = proc filterParams*(params: Table): seq[(string, string)] =
for p in params.pairs(): for p in params.pairs():
if p[1].len > 0 and p[0] notin nitterParams: if p[1].len > 0 and p[0] notin nitterParams:

View File

@@ -15,8 +15,7 @@ proc renderVideoEmbed*(tweet: Tweet; cfg: Config; req: Request): string =
let node = buildHtml(html(lang="en")): let node = buildHtml(html(lang="en")):
renderHead(prefs, cfg, req, video=vidUrl, images=(@[thumb])) renderHead(prefs, cfg, req, video=vidUrl, images=(@[thumb]))
body: tdiv(class="embed-video"):
tdiv(class="embed-video"): renderVideo(get(tweet.video), prefs, "")
renderVideo(get(tweet.video), prefs, "")
result = doctype & $node result = doctype & $node

View File

@@ -52,7 +52,7 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
let opensearchUrl = getUrlPrefix(cfg) & "/opensearch" let opensearchUrl = getUrlPrefix(cfg) & "/opensearch"
buildHtml(head): buildHtml(head):
link(rel="stylesheet", type="text/css", href="/css/style.css?v=18") link(rel="stylesheet", type="text/css", href="/css/style.css?v=16")
link(rel="stylesheet", type="text/css", href="/css/fontello.css?v=2") link(rel="stylesheet", type="text/css", href="/css/fontello.css?v=2")
if theme.len > 0: if theme.len > 0:
@@ -93,13 +93,14 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
meta(property="og:site_name", content="Nitter") meta(property="og:site_name", content="Nitter")
meta(property="og:locale", content="en_US") meta(property="og:locale", content="en_US")
if banner.len > 0 and not banner.startsWith('#'): if banner.len > 0:
let bannerUrl = getPicUrl(banner) let bannerUrl = getPicUrl(banner)
link(rel="preload", type="image/png", href=bannerUrl, `as`="image") link(rel="preload", type="image/png", href=bannerUrl, `as`="image")
for url in images: for url in images:
let preloadUrl = if "400x400" in url: getPicUrl(url) let suffix = if "400x400" in url or url.endsWith("placeholder.png"): ""
else: getSmallPic(url) else: "?name=small"
let preloadUrl = getPicUrl(url & suffix)
link(rel="preload", type="image/png", href=preloadUrl, `as`="image") link(rel="preload", type="image/png", href=preloadUrl, `as`="image")
let image = getUrlPrefix(cfg) & getPicUrl(url) let image = getUrlPrefix(cfg) & getPicUrl(url)

View File

@@ -50,7 +50,7 @@ proc renderUserCard*(user: User; prefs: Prefs): VNode =
         span:
           let url = replaceUrls(user.website, prefs)
           icon "link"
-          a(href=url): text url.shortLink
+          a(href=url): text shortLink(url)
 
     tdiv(class="profile-joindate"):
       span(title=getJoinDateFull(user)):
@@ -78,11 +78,8 @@ proc renderPhotoRail(profile: Profile): VNode =
     tdiv(class="photo-rail-grid"):
       for i, photo in profile.photoRail:
         if i == 16: break
-        let photoSuffix =
-          if "format" in photo.url or "placeholder" in photo.url: ""
-          else: ":thumb"
         a(href=(&"/{profile.user.username}/status/{photo.tweetId}#m")):
-          genImg(photo.url & photoSuffix)
+          genImg(photo.url & (if "format" in photo.url: "" else: ":thumb"))
 
 proc renderBanner(banner: string): VNode =
   buildHtml():
@@ -108,7 +105,7 @@ proc renderProfile*(profile: var Profile; prefs: Prefs; path: string): VNode =
       renderBanner(profile.user.banner)
 
     let sticky = if prefs.stickyProfile: " sticky" else: ""
-    tdiv(class=("profile-tab" & sticky)):
+    tdiv(class=(&"profile-tab{sticky}")):
       renderUserCard(profile.user, prefs)
       if profile.photoRail.len > 0:
         renderPhotoRail(profile)


@@ -1,19 +1,11 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import strutils, strformat
+import strutils
 import karax/[karaxdsl, vdom, vstyles]
 import ".."/[types, utils]
 
-const smallWebp* = "?name=small&format=webp"
-
-proc getSmallPic*(url: string): string =
-  result = url
-  if "?" notin url and not url.endsWith("placeholder.png"):
-    result &= smallWebp
-  result = getPicUrl(result)
-
 proc icon*(icon: string; text=""; title=""; class=""; href=""): VNode =
   var c = "icon-" & icon
-  if class.len > 0: c = &"{c} {class}"
+  if class.len > 0: c = c & " " & class
   buildHtml(tdiv(class="icon-container")):
     if href.len > 0:
       a(class=c, title=title, href=href)
@@ -59,23 +51,29 @@ proc buttonReferer*(action, text, path: string; class=""; `method`="post"): VNode =
 proc genCheckbox*(pref, label: string; state: bool): VNode =
   buildHtml(label(class="pref-group checkbox-container")):
     text label
-    input(name=pref, `type`="checkbox", checked=state)
+    if state: input(name=pref, `type`="checkbox", checked="")
+    else: input(name=pref, `type`="checkbox")
     span(class="checkbox")
 
-proc genInput*(pref, label, state, placeholder: string; class=""; autofocus=true): VNode =
+proc genInput*(pref, label, state, placeholder: string; class=""): VNode =
   let p = placeholder
   buildHtml(tdiv(class=("pref-group pref-input " & class))):
     if label.len > 0:
       label(`for`=pref): text label
-    input(name=pref, `type`="text", placeholder=p, value=state, autofocus=(autofocus and state.len == 0))
+    if state.len == 0:
+      input(name=pref, `type`="text", placeholder=p, value=state, autofocus="")
+    else:
+      input(name=pref, `type`="text", placeholder=p, value=state)
 
 proc genSelect*(pref, label, state: string; options: seq[string]): VNode =
   buildHtml(tdiv(class="pref-group pref-input")):
     label(`for`=pref): text label
     select(name=pref):
       for opt in options:
-        option(value=opt, selected=(opt == state)):
-          text opt
+        if opt == state:
+          option(value=opt, selected=""): text opt
+        else:
+          option(value=opt): text opt
 
 proc genDate*(pref, state: string): VNode =
   buildHtml(span(class="date-input")):
@@ -84,12 +82,15 @@ proc genDate*(pref, state: string): VNode =
 proc genImg*(url: string; class=""): VNode =
   buildHtml():
-    img(src=getPicUrl(url), class=class, alt="", loading="lazy", decoding="async")
+    img(src=getPicUrl(url), class=class, alt="")
 
 proc getTabClass*(query: Query; tab: QueryKind): string =
-  if query.kind == tab: "tab-item active"
-  else: "tab-item"
+  result = "tab-item"
+  if query.kind == tab:
+    result &= " active"
 
 proc getAvatarClass*(prefs: Prefs): string =
-  if prefs.squareAvatars: "avatar"
-  else: "avatar round"
+  if prefs.squareAvatars:
+    "avatar"
+  else:
+    "avatar round"


@@ -47,7 +47,7 @@ Twitter feed for: ${desc}. Generated by ${cfg.hostname}
 # let thumb = &"{urlPrefix}{getPicUrl(get(tweet.gif).thumb)}"
 # let url = &"{urlPrefix}{getPicUrl(get(tweet.gif).url)}"
   <video poster="${thumb}" autoplay muted loop style="max-width:250px;">
-    <source src="${url}" type="video/mp4"></video>
+    <source src="${url}" type="video/mp4"</source></video>
 #elif tweet.card.isSome:
 # let card = tweet.card.get()
 # if card.image.len > 0:
@@ -117,7 +117,7 @@ ${renderRssTweets(profile.tweets.content, cfg)}
     <atom:link href="${link}" rel="self" type="application/rss+xml" />
     <title>${xmltree.escape(list.name)} / @${list.username}</title>
     <link>${link}</link>
-    <description>${getDescription(&"{list.name} by @{list.username}", cfg)}</description>
+    <description>${getDescription(list.name & " by @" & list.username, cfg)}</description>
     <language>en-us</language>
     <ttl>40</ttl>
 ${renderRssTweets(tweets, cfg)}
@@ -135,7 +135,7 @@ ${renderRssTweets(tweets, cfg)}
     <atom:link href="${link}" rel="self" type="application/rss+xml" />
     <title>Search results for "${escName}"</title>
     <link>${link}</link>
-    <description>${getDescription(&"Search \"{escName}\"", cfg)}</description>
+    <description>${getDescription("Search \"" & escName & "\"", cfg)}</description>
     <language>en-us</language>
     <ttl>40</ttl>
 ${renderRssTweets(tweets, cfg)}


@@ -63,10 +63,12 @@ proc renderSearchPanel*(query: Query): VNode =
     hiddenField("f", "tweets")
     genInput("q", "", query.text, "Enter search...", class="pref-inline")
     button(`type`="submit"): icon "search"
-    input(id="search-panel-toggle", `type`="checkbox", checked=isPanelOpen(query))
-    label(`for`="search-panel-toggle"): icon "down"
+    if isPanelOpen(query):
+      input(id="search-panel-toggle", `type`="checkbox", checked="")
+    else:
+      input(id="search-panel-toggle", `type`="checkbox")
+    label(`for`="search-panel-toggle"):
+      icon "down"
     tdiv(class="search-panel"):
       for f in @["filter", "exclude"]:
         span(class="search-title"): text capitalize(f)
@@ -86,7 +88,7 @@ proc renderSearchPanel*(query: Query): VNode =
         genDate("until", query.until)
       tdiv:
         span(class="search-title"): text "Near"
-        genInput("near", "", query.near, "Location...", autofocus=false)
+        genInput("near", "", query.near, placeholder="Location...")
 
 proc renderTweetSearch*(results: Result[Tweet]; prefs: Prefs; path: string;
                         pinned=none(Tweet)): VNode =


@@ -7,12 +7,16 @@ import renderutils
 import ".."/[types, utils, formatters]
 import general
 
-const doctype = "<!DOCTYPE html>\n"
+proc getSmallPic(url: string): string =
+  result = url
+  if "?" notin url and not url.endsWith("placeholder.png"):
+    result &= ":small"
+  result = getPicUrl(result)
 
 proc renderMiniAvatar(user: User; prefs: Prefs): VNode =
   let url = getPicUrl(user.getUserPic("_mini"))
   buildHtml():
-    img(class=(prefs.getAvatarClass & " mini"), src=url, loading="lazy")
+    img(class=(prefs.getAvatarClass & " mini"), src=url)
 
 proc renderHeader(tweet: Tweet; retweet: string; prefs: Prefs): VNode =
   buildHtml(tdiv):
@@ -53,21 +57,19 @@ proc renderAlbum(tweet: Tweet): VNode =
           tdiv(class="attachment image"):
             let
               named = "name=" in photo
-              small = if named: photo else: photo & smallWebp
-            a(href=getOrigPicUrl(photo), class="still-image", target="_blank"):
+              orig = if named: photo else: photo & "?name=orig"
+              small = if named: photo else: photo & "?name=small"
+            a(href=getPicUrl(orig), class="still-image", target="_blank"):
               genImg(small)
 
-proc isPlaybackEnabled(prefs: Prefs; playbackType: VideoType): bool =
-  case playbackType
+proc isPlaybackEnabled(prefs: Prefs; video: Video): bool =
+  case video.playbackType
   of mp4: prefs.mp4Playback
   of m3u8, vmap: prefs.hlsPlayback
 
-proc hasMp4Url(video: Video): bool =
-  video.variants.anyIt(it.contentType == mp4)
-
-proc renderVideoDisabled(playbackType: VideoType; path: string): VNode =
+proc renderVideoDisabled(video: Video; path: string): VNode =
   buildHtml(tdiv(class="video-overlay")):
-    case playbackType
+    case video.playbackType
     of mp4:
       p: text "mp4 playback disabled in preferences"
     of m3u8, vmap:
@@ -82,38 +84,36 @@ proc renderVideoUnavailable(video: Video): VNode =
       p: text "This media is unavailable"
 
 proc renderVideo*(video: Video; prefs: Prefs; path: string): VNode =
-  let
-    container = if video.description.len == 0 and video.title.len == 0: ""
-                else: " card-container"
-    playbackType = if prefs.proxyVideos and video.hasMp4Url: mp4
-                   else: video.playbackType
+  let container =
+    if video.description.len > 0 or video.title.len > 0: " card-container"
+    else: ""
 
   buildHtml(tdiv(class="attachments card")):
     tdiv(class="gallery-video" & container):
       tdiv(class="attachment video-container"):
         let thumb = getSmallPic(video.thumb)
-        let canPlay = prefs.isPlaybackEnabled(playbackType)
-
-        if video.available and canPlay:
-          let
-            vidUrl = video.getVidVariant(playbackType).url
-            source = if prefs.proxyVideos: getVidUrl(vidUrl)
-                     else: vidUrl
-          case playbackType
+        if not video.available:
+          img(src=thumb)
+          renderVideoUnavailable(video)
+        elif not prefs.isPlaybackEnabled(video):
+          img(src=thumb)
+          renderVideoDisabled(video, path)
+        else:
+          let vid = video.variants.filterIt(it.contentType == video.playbackType)
+          let source = getVidUrl(vid[0].url)
+          case video.playbackType
           of mp4:
-            video(src=source, poster=thumb, controls="", muted=prefs.muteVideos, preload="metadata")
+            if prefs.muteVideos:
+              video(poster=thumb, controls="", muted=""):
+                source(src=source, `type`="video/mp4")
+            else:
+              video(poster=thumb, controls=""):
+                source(src=source, `type`="video/mp4")
           of m3u8, vmap:
-            video(poster=thumb, data-url=source, data-autoload="false", muted=prefs.muteVideos)
+            video(poster=thumb, data-url=source, data-autoload="false")
             verbatim "<div class=\"video-overlay\" onclick=\"playVideo(this)\">"
             tdiv(class="overlay-circle"): span(class="overlay-triangle")
             verbatim "</div>"
-        else:
-          img(src=thumb, loading="lazy", decoding="async")
-          if not canPlay:
-            renderVideoDisabled(playbackType, path)
-          else:
-            renderVideoUnavailable(video)
 
     if container.len > 0:
       tdiv(class="card-content"):
         h2(class="card-title"): text video.title
@@ -124,9 +124,14 @@ proc renderGif(gif: Gif; prefs: Prefs): VNode =
   buildHtml(tdiv(class="attachments media-gif")):
     tdiv(class="gallery-gif", style={maxHeight: "unset"}):
       tdiv(class="attachment"):
-        video(class="gif", poster=getSmallPic(gif.thumb), autoplay=prefs.autoplayGifs,
-              controls="", muted="", loop=""):
-          source(src=getPicUrl(gif.url), `type`="video/mp4")
+        let thumb = getSmallPic(gif.thumb)
+        let url = getPicUrl(gif.url)
+        if prefs.autoplayGifs:
+          video(class="gif", poster=thumb, controls="", autoplay="", muted="", loop=""):
+            source(src=url, `type`="video/mp4")
+        else:
+          video(class="gif", poster=thumb, controls="", muted="", loop=""):
+            source(src=url, `type`="video/mp4")
 
 proc renderPoll(poll: Poll): VNode =
   buildHtml(tdiv(class="poll")):
@@ -141,12 +146,12 @@ proc renderPoll(poll: Poll): VNode =
         span(class="poll-choice-value"): text percStr
         span(class="poll-choice-option"): text poll.options[i]
     span(class="poll-info"):
-      text &"{insertSep($poll.votes, ',')} votes • {poll.status}"
+      text insertSep($poll.votes, ',') & " votes • " & poll.status
 
 proc renderCardImage(card: Card): VNode =
   buildHtml(tdiv(class="card-image-container")):
     tdiv(class="card-image"):
-      img(src=getPicUrl(card.image), alt="", loading="lazy")
+      img(src=getPicUrl(card.image), alt="")
       if card.kind == player:
         tdiv(class="card-overlay"):
           tdiv(class="overlay-circle"):
@@ -320,7 +325,7 @@ proc renderTweet*(tweet: Tweet; prefs: Prefs; path: string; class=""; index=0;
       if tweet.attribution.isSome:
         renderAttribution(tweet.attribution.get(), prefs)
 
-      if tweet.card.isSome and tweet.card.get().kind != hidden:
+      if tweet.card.isSome:
         renderCard(tweet.card.get(), prefs, path)
 
       if tweet.photos.len > 0:
@@ -339,7 +344,7 @@ proc renderTweet*(tweet: Tweet; prefs: Prefs; path: string; class=""; index=0;
         renderQuote(tweet.quote.get(), prefs, path)
 
       if mainTweet:
-        p(class="tweet-published"): text &"{getTime(tweet)}"
+        p(class="tweet-published"): text getTime(tweet)
 
       if tweet.mediaTags.len > 0:
         renderMediaTags(tweet.mediaTags)
@@ -351,12 +356,7 @@
         a(class="show-thread", href=("/i/status/" & $tweet.threadId)):
           text "Show this thread"
 
-proc renderTweetEmbed*(tweet: Tweet; path: string; prefs: Prefs; cfg: Config; req: Request): string =
-  let node = buildHtml(html(lang="en")):
-    renderHead(prefs, cfg, req)
-
-    body:
-      tdiv(class="tweet-embed"):
-        renderTweet(tweet, prefs, path, mainTweet=true)
-
-  result = doctype & $node
+proc renderTweetEmbed*(tweet: Tweet; path: string; prefs: Prefs; cfg: Config; req: Request): VNode =
+  buildHtml(tdiv(class="tweet-embed")):
+    renderTweet(tweet, prefs, path, mainTweet=true)


@@ -1 +0,0 @@
-seleniumbase


@@ -3,6 +3,11 @@ from parameterized import parameterized
 
 card = [
+    ['Thom_Wolf/status/1122466524860702729',
+     'pytorch/fairseq',
+     'Facebook AI Research Sequence-to-Sequence Toolkit written in Python. - GitHub - pytorch/fairseq: Facebook AI Research Sequence-to-Sequence Toolkit written in Python.',
+     'github.com', True],
+
     ['nim_lang/status/1136652293510717440',
      'Version 0.20.0 released',
      'We are very proud to announce Nim version 0.20. This is a massive release, both literally and figuratively. It contains more than 1,000 commits and it marks our release candidate for version 1.0!',
@@ -20,10 +25,10 @@
 ]
 
 no_thumb = [
-    ['Thom_Wolf/status/1122466524860702729',
-     'facebookresearch/fairseq',
-     'Facebook AI Research Sequence-to-Sequence Toolkit written in Python. - GitHub - facebookresearch/fairseq: Facebook AI Research Sequence-to-Sequence Toolkit written in Python.',
-     'github.com'],
+    ['Bountysource/status/1141879700639215617',
+     'Post a bounty on kivy/plyer!',
+     'Automation and Screen Reader Support',
+     'bountysource.com'],
 
     ['brent_p/status/1088857328680488961',
      'Hts Nim Sugar',
@@ -35,9 +40,14 @@
      'A sample of a Qt app written using mostly nim. Contribute to sinkingsugar/nimqt-example development by creating an account on GitHub.',
      'github.com'],
 
+    ['mobile_test/status/490378953744318464',
+     'Nantasket Beach',
+     'Explore this photo titled Nantasket Beach by Ben Sandofsky (@sandofsky) on 500px',
+     '500px.com'],
+
     ['nim_lang/status/1082989146040340480',
      'Nim in 2018: A short recap',
-     'Posted by u/miran1 - 36 votes and 46 comments',
+     'Posted in r/programming by u/miran1',
      'reddit.com']
 ]
@@ -71,7 +81,7 @@ class CardTest(BaseTestCase):
         c = Card(Conversation.main + " ")
         self.assert_text(title, c.title)
         self.assert_text(destination, c.destination)
-        self.assertIn('/pic/', self.get_image_url(c.image + ' img'))
+        self.assertIn('_img', self.get_image_url(c.image + ' img'))
         if len(description) > 0:
             self.assert_text(description, c.description)
         if large:
@@ -94,7 +104,7 @@ class CardTest(BaseTestCase):
         c = Card(Conversation.main + " ")
         self.assert_text(title, c.title)
         self.assert_text(destination, c.destination)
-        self.assertIn('/pic/', self.get_image_url(c.image + ' img'))
+        self.assertIn('_img', self.get_image_url(c.image + ' img'))
         self.assert_element_visible('.card-overlay')
         if len(description) > 0:
             self.assert_text(description, c.description)


@@ -17,6 +17,11 @@ protected = [
 invalid = [['thisprofiledoesntexist'], ['%']]
 
+banner_color = [
+    ['nim_lang', '22, 25, 32'],
+    ['rustlang', '35, 31, 32']
+]
+
 banner_image = [
     ['mobile_test', 'profile_banners%2F82135242%2F1384108037%2F1500x500']
 ]
@@ -69,6 +74,12 @@ class ProfileTest(BaseTestCase):
         self.open_nitter('user')
         self.assert_text('User "user" has been suspended')
 
+    @parameterized.expand(banner_color)
+    def test_banner_color(self, username, color):
+        self.open_nitter(username)
+        banner = self.find_element(Profile.banner + ' a')
+        self.assertIn(color, banner.value_of_css_property('background-color'))
+
     @parameterized.expand(banner_image)
     def test_banner_image(self, username, url):
         self.open_nitter(username)


@@ -3,18 +3,14 @@ from parameterized import parameterized
 text = [
     ['elonmusk/status/1138136540096319488',
-     'TREV PAGE', '@Model3Owners',
+     'Tesla Owners Online', '@Model3Owners',
      """As of March 58.4% of new car sales in Norway are electric.
 
 What are we doing wrong? reuters.com/article/us-norwa…"""],
 
-    ['nim_lang/status/1491461266849808397#m',
-     'Nim language', '@nim_lang',
-     """What's better than Nim 1.6.0?
-
-Nim 1.6.2 :)
-
-nim-lang.org/blog/2021/12/17…"""]
+    ['nim_lang/status/924694255364341760',
+     'Hacker News', '@newsycombinator',
+     'Why Rust fails hard at scientific computing andre-ratsimbazafy.com/why-r…']
 ]
 
 image = [


@@ -16,7 +16,7 @@ timeline = [
 ]
 
 status = [
-    [20, 'jack', 'jack', '21 Mar 2006', 'just setting up my twttr'],
+    [20, 'jack⚡️', 'jack', '21 Mar 2006', 'just setting up my twttr'],
     [134849778302464000, 'The Twoffice', 'TheTwoffice', '11 Nov 2011', 'test'],
     [105685475985080322, 'The Twoffice', 'TheTwoffice', '22 Aug 2011', 'regular tweet'],
     [572593440719912960, 'Test account', 'mobile_test', '3 Mar 2015', 'testing test']
@@ -71,7 +71,7 @@ emoji = [
 retweet = [
     [7, 'mobile_test_2', 'mobile test 2', 'Test account', '@mobile_test', '1234'],
-    [3, 'mobile_test_8', 'mobile test 8', 'jack', '@jack', 'twttr']
+    [3, 'mobile_test_8', 'mobile test 8', 'jack⚡️', '@jack', 'twttr']
 ]
 
 reply = [