Mirror of https://github.com/zedeus/nitter.git (synced 2025-12-05 19:45:36 -05:00)
Add experimental x-client-transaction-id support (#1324)
* Add experimental x-client-transaction-id support
* Remove broken test
nitter.example.conf
@@ -26,6 +26,7 @@ enableRSS = true # set this to false to disable RSS feeds
enableDebug = false # enable request logs and debug endpoints (/.sessions)
proxy = "" # http/https url, SOCKS proxies are not supported
proxyAuth = ""
disableTid = false # enable this if cookie-based auth is failing

# Change default preferences here, see src/prefs_impl.nim for a complete list
[Preferences]
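The new disableTid option is read from the [Config] section like the existing keys and handed to apiutils at startup (see the src/config.nim, src/nitter.nim and src/apiutils.nim hunks further down). A minimal startup sketch, not part of the commit, assuming it is compiled inside the Nitter src tree and that "./nitter.conf" stands in for whatever config path is actually used:

# Hypothetical wiring sketch using names from the hunks below.
import config, apiutils

let (cfg, _) = getConfig("./nitter.conf")
# When true, genHeaders falls back to bearerToken2 and skips the
# x-client-transaction-id header for cookie sessions.
setDisableTid(cfg.disableTid)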
src/api.nim (129 changed lines)
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, httpclient, uri, strutils, sequtils, sugar, tables
import asyncdispatch, httpclient, strutils, sequtils, sugar
import packedjson
import types, query, formatters, consts, apiutils, parser
import experimental/parser as newParser
@@ -11,95 +11,91 @@ proc genParams(variables: string; fieldToggles = ""): seq[(string, string)] =
  if fieldToggles.len > 0:
    result.add ("fieldToggles", fieldToggles)

proc mediaUrl(id: string; cursor: string): SessionAwareUrl =
  let
    cookieVars = userMediaVars % [id, cursor]
    oauthVars = restIdVars % [id, cursor]
  result = SessionAwareUrl(
    cookieUrl: graphUserMedia ? genParams(cookieVars),
    oauthUrl: graphUserMediaV2 ? genParams(oauthVars)
proc apiUrl(endpoint, variables: string; fieldToggles = ""): ApiUrl =
  return ApiUrl(endpoint: endpoint, params: genParams(variables, fieldToggles))

proc apiReq(endpoint, variables: string; fieldToggles = ""): ApiReq =
  let url = apiUrl(endpoint, variables, fieldToggles)
  return ApiReq(cookie: url, oauth: url)

proc mediaUrl(id: string; cursor: string): ApiReq =
  result = ApiReq(
    cookie: apiUrl(graphUserMedia, userMediaVars % [id, cursor]),
    oauth: apiUrl(graphUserMediaV2, restIdVars % [id, cursor])
  )

proc userTweetsUrl(id: string; cursor: string): SessionAwareUrl =
  let
    cookieVars = userTweetsVars % [id, cursor]
    oauthVars = restIdVars % [id, cursor]
  result = SessionAwareUrl(
    # cookieUrl: graphUserTweets ? genParams(cookieVars, userTweetsFieldToggles),
    oauthUrl: graphUserTweetsV2 ? genParams(oauthVars)
proc userTweetsUrl(id: string; cursor: string): ApiReq =
  result = ApiReq(
    # cookie: apiUrl(graphUserTweets, userTweetsVars % [id, cursor], userTweetsFieldToggles),
    oauth: apiUrl(graphUserTweetsV2, restIdVars % [id, cursor])
  )
  # might change this in the future pending testing
  result.cookieUrl = result.oauthUrl
  result.cookie = result.oauth

proc userTweetsAndRepliesUrl(id: string; cursor: string): SessionAwareUrl =
  let
    cookieVars = userTweetsAndRepliesVars % [id, cursor]
    oauthVars = restIdVars % [id, cursor]
  result = SessionAwareUrl(
    cookieUrl: graphUserTweetsAndReplies ? genParams(cookieVars, userTweetsFieldToggles),
    oauthUrl: graphUserTweetsAndRepliesV2 ? genParams(oauthVars)
proc userTweetsAndRepliesUrl(id: string; cursor: string): ApiReq =
  let cookieVars = userTweetsAndRepliesVars % [id, cursor]
  result = ApiReq(
    cookie: apiUrl(graphUserTweetsAndReplies, cookieVars, userTweetsFieldToggles),
    oauth: apiUrl(graphUserTweetsAndRepliesV2, restIdVars % [id, cursor])
  )

proc tweetDetailUrl(id: string; cursor: string): SessionAwareUrl =
  let
    cookieVars = tweetDetailVars % [id, cursor]
    oauthVars = tweetVars % [id, cursor]
  result = SessionAwareUrl(
    cookieUrl: graphTweetDetail ? genParams(cookieVars, tweetDetailFieldToggles),
    oauthUrl: graphTweet ? genParams(oauthVars)
proc tweetDetailUrl(id: string; cursor: string): ApiReq =
  let cookieVars = tweetDetailVars % [id, cursor]
  result = ApiReq(
    cookie: apiUrl(graphTweetDetail, cookieVars, tweetDetailFieldToggles),
    oauth: apiUrl(graphTweet, tweetVars % [id, cursor])
  )

proc userUrl(username: string): SessionAwareUrl =
  let
    cookieVars = """{"screen_name":"$1","withGrokTranslatedBio":false}""" % username
    oauthVars = """{"screen_name": "$1"}""" % username
  result = SessionAwareUrl(
    cookieUrl: graphUser ? genParams(cookieVars, tweetDetailFieldToggles),
    oauthUrl: graphUserV2 ? genParams(oauthVars)
proc userUrl(username: string): ApiReq =
  let cookieVars = """{"screen_name":"$1","withGrokTranslatedBio":false}""" % username
  result = ApiReq(
    cookie: apiUrl(graphUser, cookieVars, tweetDetailFieldToggles),
    oauth: apiUrl(graphUserV2, """{"screen_name": "$1"}""" % username)
  )

proc getGraphUser*(username: string): Future[User] {.async.} =
  if username.len == 0: return
  let js = await fetchRaw(userUrl(username), Api.userScreenName)
  let js = await fetchRaw(userUrl(username))
  result = parseGraphUser(js)

proc getGraphUserById*(id: string): Future[User] {.async.} =
  if id.len == 0 or id.any(c => not c.isDigit): return
  let
    url = graphUserById ? genParams("""{"rest_id": "$1"}""" % id)
    js = await fetchRaw(url, Api.userRestId)
    url = apiReq(graphUserById, """{"rest_id": "$1"}""" % id)
    js = await fetchRaw(url)
  result = parseGraphUser(js)

proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Profile] {.async.} =
  if id.len == 0: return
  let
    cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
    js = case kind
      of TimelineKind.tweets:
        await fetch(userTweetsUrl(id, cursor), Api.userTweets)
      of TimelineKind.replies:
        await fetch(userTweetsAndRepliesUrl(id, cursor), Api.userTweetsAndReplies)
      of TimelineKind.media:
        await fetch(mediaUrl(id, cursor), Api.userMedia)
    url = case kind
      of TimelineKind.tweets: userTweetsUrl(id, cursor)
      of TimelineKind.replies: userTweetsAndRepliesUrl(id, cursor)
      of TimelineKind.media: mediaUrl(id, cursor)
    js = await fetch(url)
  result = parseGraphTimeline(js, after)

proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} =
  if id.len == 0: return
  let
    cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
    url = graphListTweets ? genParams(restIdVars % [id, cursor])
  result = parseGraphTimeline(await fetch(url, Api.listTweets), after).tweets
    url = apiReq(graphListTweets, restIdVars % [id, cursor])
    js = await fetch(url)
  result = parseGraphTimeline(js, after).tweets

proc getGraphListBySlug*(name, list: string): Future[List] {.async.} =
  let
    variables = %*{"screenName": name, "listSlug": list}
    url = graphListBySlug ? genParams($variables)
  result = parseGraphList(await fetch(url, Api.listBySlug))
    url = apiReq(graphListBySlug, $variables)
    js = await fetch(url)
  result = parseGraphList(js)

proc getGraphList*(id: string): Future[List] {.async.} =
  let
    url = graphListById ? genParams("""{"listId": "$1"}""" % id)
  result = parseGraphList(await fetch(url, Api.list))
    url = apiReq(graphListById, """{"listId": "$1"}""" % id)
    js = await fetch(url)
  result = parseGraphList(js)

proc getGraphListMembers*(list: List; after=""): Future[Result[User]] {.async.} =
  if list.id.len == 0: return
@@ -113,22 +109,23 @@ proc getGraphListMembers*(list: List; after=""): Future[Result[User]] {.async.}
  }
  if after.len > 0:
    variables["cursor"] = % after
  let url = graphListMembers ? genParams($variables)
  result = parseGraphListMembers(await fetchRaw(url, Api.listMembers), after)
  let
    url = apiReq(graphListMembers, $variables)
    js = await fetchRaw(url)
  result = parseGraphListMembers(js, after)

proc getGraphTweetResult*(id: string): Future[Tweet] {.async.} =
  if id.len == 0: return
  let
    variables = """{"rest_id": "$1"}""" % id
    params = {"variables": variables, "features": gqlFeatures}
    js = await fetch(graphTweetResult ? params, Api.tweetResult)
    url = apiReq(graphTweetResult, """{"rest_id": "$1"}""" % id)
    js = await fetch(url)
  result = parseGraphTweetResult(js)

proc getGraphTweet(id: string; after=""): Future[Conversation] {.async.} =
  if id.len == 0: return
  let
    cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
    js = await fetch(tweetDetailUrl(id, cursor), Api.tweetDetail)
    js = await fetch(tweetDetailUrl(id, cursor))
  result = parseGraphConversation(js, id)

proc getReplies*(id, after: string): Future[Result[Chain]] {.async.} =
@@ -157,8 +154,10 @@ proc getGraphTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
  }
  if after.len > 0:
    variables["cursor"] = % after
  let url = graphSearchTimeline ? genParams($variables)
  result = parseGraphSearch[Tweets](await fetch(url, Api.search), after)
  let
    url = apiReq(graphSearchTimeline, $variables)
    js = await fetch(url)
  result = parseGraphSearch[Tweets](js, after)
  result.query = query

proc getGraphUserSearch*(query: Query; after=""): Future[Result[User]] {.async.} =
@@ -179,13 +178,15 @@ proc getGraphUserSearch*(query: Query; after=""): Future[Result[User]] {.async.}
    variables["cursor"] = % after
    result.beginning = false

  let url = graphSearchTimeline ? genParams($variables)
  result = parseGraphSearch[User](await fetch(url, Api.search), after)
  let
    url = apiReq(graphSearchTimeline, $variables)
    js = await fetch(url)
  result = parseGraphSearch[User](js, after)
  result.query = query

proc getPhotoRail*(id: string): Future[PhotoRail] {.async.} =
  if id.len == 0: return
  let js = await fetch(mediaUrl(id, ""), Api.userMedia)
  let js = await fetch(mediaUrl(id, ""))
  result = parseGraphPhotoRail(js)

proc resolve*(url: string; prefs: Prefs): Future[string] {.async.} =
src/apiutils.nim
@@ -1,7 +1,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
import httpclient, asyncdispatch, options, strutils, uri, times, math, tables
import jsony, packedjson, zippy, oauth1
import types, auth, consts, parserutils, http_pool
import types, auth, consts, parserutils, http_pool, tid
import experimental/types/common

const
@@ -10,7 +10,21 @@ const
  rlLimit = "x-rate-limit-limit"
  errorsToSkip = {null, doesntExist, tweetNotFound, timeout, unauthorized, badRequest}

var pool: HttpPool
var
  pool: HttpPool
  disableTid: bool

proc setDisableTid*(disable: bool) =
  disableTid = disable

proc toUrl(req: ApiReq; sessionKind: SessionKind): Uri =
  case sessionKind
  of oauth:
    let o = req.oauth
    parseUri("https://api.x.com/graphql") / o.endpoint ? o.params
  of cookie:
    let c = req.cookie
    parseUri("https://x.com/i/api/graphql") / c.endpoint ? c.params

proc getOauthHeader(url, oauthToken, oauthTokenSecret: string): string =
  let
@@ -32,15 +46,15 @@ proc getOauthHeader(url, oauthToken, oauthTokenSecret: string): string =
proc getCookieHeader(authToken, ct0: string): string =
  "auth_token=" & authToken & "; ct0=" & ct0

proc genHeaders*(session: Session, url: string): HttpHeaders =
proc genHeaders*(session: Session, url: Uri): Future[HttpHeaders] {.async.} =
  result = newHttpHeaders({
    "connection": "keep-alive",
    "content-type": "application/json",
    "x-twitter-active-user": "yes",
    "x-twitter-client-language": "en",
    "authority": "api.x.com",
    "origin": "https://x.com",
    "accept-encoding": "gzip",
    "accept-language": "en-US,en;q=0.9",
    "accept-language": "en-US,en;q=0.5",
    "accept": "*/*",
    "DNT": "1",
    "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36"
@@ -48,15 +62,20 @@ proc genHeaders*(session: Session, url: string): HttpHeaders =
  case session.kind
  of SessionKind.oauth:
    result["authorization"] = getOauthHeader(url, session.oauthToken, session.oauthSecret)
    result["authority"] = "api.x.com"
    result["authorization"] = getOauthHeader($url, session.oauthToken, session.oauthSecret)
  of SessionKind.cookie:
    result["authorization"] = "Bearer AAAAAAAAAAAAAAAAAAAAAFXzAwAAAAAAMHCxpeSDG1gLNLghVe8d74hl6k4%3DRUMF4xAQLsbeBhTSRrCiQpJtxoGWeyHrDb5te2jpGskWDFW82F"
    result["x-twitter-auth-type"] = "OAuth2Session"
    result["x-csrf-token"] = session.ct0
    result["cookie"] = getCookieHeader(session.authToken, session.ct0)
    if disableTid:
      result["authorization"] = bearerToken2
    else:
      result["authorization"] = bearerToken
      result["x-client-transaction-id"] = await genTid(url.path)

proc getAndValidateSession*(api: Api): Future[Session] {.async.} =
  result = await getSession(api)
proc getAndValidateSession*(req: ApiReq): Future[Session] {.async.} =
  result = await getSession(req)
  case result.kind
  of SessionKind.oauth:
    if result.oauthToken.len == 0:
@@ -73,7 +92,7 @@ template fetchImpl(result, fetchBody) {.dirty.} =
  try:
    var resp: AsyncResponse
    pool.use(genHeaders(session, $url)):
    pool.use(await genHeaders(session, url)):
      template getContent =
        resp = await c.get($url)
        result = await resp.body
@@ -89,7 +108,7 @@ template fetchImpl(result, fetchBody) {.dirty.} =
        remaining = parseInt(resp.headers[rlRemaining])
        reset = parseInt(resp.headers[rlReset])
        limit = parseInt(resp.headers[rlLimit])
      session.setRateLimit(api, remaining, reset, limit)
      session.setRateLimit(req, remaining, reset, limit)

    if result.len > 0:
      if resp.headers.getOrDefault("content-encoding") == "gzip":
@@ -98,24 +117,22 @@ template fetchImpl(result, fetchBody) {.dirty.} =
      if result.startsWith("{\"errors"):
        let errors = result.fromJson(Errors)
        if errors notin errorsToSkip:
          echo "Fetch error, API: ", api, ", errors: ", errors
          echo "Fetch error, API: ", url.path, ", errors: ", errors
          if errors in {expiredToken, badToken, locked}:
            invalidate(session)
            raise rateLimitError()
          elif errors in {rateLimited}:
            # rate limit hit, resets after 24 hours
            setLimited(session, api)
            setLimited(session, req)
            raise rateLimitError()
      elif result.startsWith("429 Too Many Requests"):
        echo "[sessions] 429 error, API: ", api, ", session: ", session.pretty
        session.apis[api].remaining = 0
        # rate limit hit, resets after the 15 minute window
        echo "[sessions] 429 error, API: ", url.path, ", session: ", session.pretty
        raise rateLimitError()

    fetchBody

    if resp.status == $Http400:
      echo "ERROR 400, ", api, ": ", result
      echo "ERROR 400, ", url.path, ": ", result
      raise newException(InternalError, $url)
  except InternalError as e:
    raise e
@@ -134,19 +151,16 @@ template retry(bod) =
  try:
    bod
  except RateLimitError:
    echo "[sessions] Rate limited, retrying ", api, " request..."
    echo "[sessions] Rate limited, retrying ", req.cookie.endpoint, " request..."
    bod

proc fetch*(url: Uri | SessionAwareUrl; api: Api): Future[JsonNode] {.async.} =
proc fetch*(req: ApiReq): Future[JsonNode] {.async.} =
  retry:
    var
      body: string
      session = await getAndValidateSession(api)
      session = await getAndValidateSession(req)

    when url is SessionAwareUrl:
      let url = case session.kind
        of SessionKind.oauth: url.oauthUrl
        of SessionKind.cookie: url.cookieUrl
    let url = req.toUrl(session.kind)

    fetchImpl body:
      if body.startsWith('{') or body.startsWith('['):
@@ -157,19 +171,15 @@ proc fetch*(url: Uri | SessionAwareUrl; api: Api): Future[JsonNode] {.async.} =
      let error = result.getError
      if error != null and error notin errorsToSkip:
        echo "Fetch error, API: ", api, ", error: ", error
        echo "Fetch error, API: ", url.path, ", error: ", error
        if error in {expiredToken, badToken, locked}:
          invalidate(session)
          raise rateLimitError()

proc fetchRaw*(url: Uri | SessionAwareUrl; api: Api): Future[string] {.async.} =
proc fetchRaw*(req: ApiReq): Future[string] {.async.} =
  retry:
    var session = await getAndValidateSession(api)

    when url is SessionAwareUrl:
      let url = case session.kind
        of SessionKind.oauth: url.oauthUrl
        of SessionKind.cookie: url.cookieUrl
    var session = await getAndValidateSession(req)
    let url = req.toUrl(session.kind)

    fetchImpl result:
      if not (result.startsWith('{') or result.startsWith('[')):
src/auth.nim (32 changed lines)
@@ -1,6 +1,6 @@
#SPDX-License-Identifier: AGPL-3.0-only
import std/[asyncdispatch, times, json, random, sequtils, strutils, tables, packedsets, os]
import types
import std/[asyncdispatch, times, json, random, strutils, tables, packedsets, os]
import types, consts
import experimental/parser/session

# max requests at a time per session to avoid race conditions
@@ -15,6 +15,11 @@ var
template log(str: varargs[string, `$`]) =
  echo "[sessions] ", str.join("")

proc endpoint(req: ApiReq; session: Session): string =
  case session.kind
  of oauth: req.oauth.endpoint
  of cookie: req.cookie.endpoint

proc pretty*(session: Session): string =
  if session.isNil:
    return "<null>"
@@ -122,11 +127,12 @@ proc rateLimitError*(): ref RateLimitError =
proc noSessionsError*(): ref NoSessionsError =
  newException(NoSessionsError, "no sessions available")

proc isLimited(session: Session; api: Api): bool =
proc isLimited(session: Session; req: ApiReq): bool =
  if session.isNil:
    return true

  if session.limited and api != Api.userTweets:
  let api = req.endpoint(session)
  if session.limited and api != graphUserTweetsV2:
    if (epochTime().int - session.limitedAt) > hourInSeconds:
      session.limited = false
      log "resetting limit: ", session.pretty
@@ -140,8 +146,8 @@ proc isLimited(session: Session; api: Api): bool =
  else:
    return false

proc isReady(session: Session; api: Api): bool =
  not (session.isNil or session.pending > maxConcurrentReqs or session.isLimited(api))
proc isReady(session: Session; req: ApiReq): bool =
  not (session.isNil or session.pending > maxConcurrentReqs or session.isLimited(req))

proc invalidate*(session: var Session) =
  if session.isNil: return
@@ -156,24 +162,26 @@ proc release*(session: Session) =
  if session.isNil: return
  dec session.pending

proc getSession*(api: Api): Future[Session] {.async.} =
proc getSession*(req: ApiReq): Future[Session] {.async.} =
  for i in 0 ..< sessionPool.len:
    if result.isReady(api): break
    if result.isReady(req): break
    result = sessionPool.sample()

  if not result.isNil and result.isReady(api):
  if not result.isNil and result.isReady(req):
    inc result.pending
  else:
    log "no sessions available for API: ", api
    log "no sessions available for API: ", req.cookie.endpoint
    raise noSessionsError()

proc setLimited*(session: Session; api: Api) =
proc setLimited*(session: Session; req: ApiReq) =
  let api = req.endpoint(session)
  session.limited = true
  session.limitedAt = epochTime().int
  log "rate limited by api: ", api, ", reqs left: ", session.apis[api].remaining, ", ", session.pretty

proc setRateLimit*(session: Session; api: Api; remaining, reset, limit: int) =
proc setRateLimit*(session: Session; req: ApiReq; remaining, reset, limit: int) =
  # avoid undefined behavior in race conditions
  let api = req.endpoint(session)
  if api in session.apis:
    let rateLimit = session.apis[api]
    if rateLimit.reset >= reset and rateLimit.remaining < remaining:
src/config.nim
@@ -40,7 +40,8 @@ proc getConfig*(path: string): (Config, parseCfg.Config) =
    enableRss: cfg.get("Config", "enableRSS", true),
    enableDebug: cfg.get("Config", "enableDebug", false),
    proxy: cfg.get("Config", "proxy", ""),
    proxyAuth: cfg.get("Config", "proxyAuth", "")
    proxyAuth: cfg.get("Config", "proxyAuth", ""),
    disableTid: cfg.get("Config", "disableTid", false)
  )

  return (conf, cfg)
src/consts.nim
@@ -1,29 +1,29 @@
# SPDX-License-Identifier: AGPL-3.0-only
import uri, strutils
import strutils

const
  consumerKey* = "3nVuSoBZnx6U4vzUxf5w"
  consumerSecret* = "Bcs59EFbbsdF6Sl9Ng71smgStWEGwXXKSjYvPVt7qys"
  bearerToken* = "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA"
  bearerToken2* = "Bearer AAAAAAAAAAAAAAAAAAAAAFXzAwAAAAAAMHCxpeSDG1gLNLghVe8d74hl6k4%3DRUMF4xAQLsbeBhTSRrCiQpJtxoGWeyHrDb5te2jpGskWDFW82F"

  gql = parseUri("https://api.x.com") / "graphql"

  graphUser* = gql / "-oaLodhGbbnzJBACb1kk2Q/UserByScreenName"
  graphUserV2* = gql / "WEoGnYB0EG1yGwamDCF6zg/UserResultByScreenNameQuery"
  graphUserById* = gql / "VN33vKXrPT7p35DgNR27aw/UserResultByIdQuery"
  graphUserTweetsV2* = gql / "6QdSuZ5feXxOadEdXa4XZg/UserWithProfileTweetsQueryV2"
  graphUserTweetsAndRepliesV2* = gql / "BDX77Xzqypdt11-mDfgdpQ/UserWithProfileTweetsAndRepliesQueryV2"
  graphUserTweets* = gql / "oRJs8SLCRNRbQzuZG93_oA/UserTweets"
  graphUserTweetsAndReplies* = gql / "kkaJ0Mf34PZVarrxzLihjg/UserTweetsAndReplies"
  graphUserMedia* = gql / "36oKqyQ7E_9CmtONGjJRsA/UserMedia"
  graphUserMediaV2* = gql / "bp0e_WdXqgNBIwlLukzyYA/MediaTimelineV2"
  graphTweet* = gql / "Y4Erk_-0hObvLpz0Iw3bzA/ConversationTimeline"
  graphTweetDetail* = gql / "YVyS4SfwYW7Uw5qwy0mQCA/TweetDetail"
  graphTweetResult* = gql / "nzme9KiYhfIOrrLrPP_XeQ/TweetResultByIdQuery"
  graphSearchTimeline* = gql / "bshMIjqDk8LTXTq4w91WKw/SearchTimeline"
  graphListById* = gql / "cIUpT1UjuGgl_oWiY7Snhg/ListByRestId"
  graphListBySlug* = gql / "K6wihoTiTrzNzSF8y1aeKQ/ListBySlug"
  graphListMembers* = gql / "fuVHh5-gFn8zDBBxb8wOMA/ListMembers"
  graphListTweets* = gql / "VQf8_XQynI3WzH6xopOMMQ/ListTimeline"
  graphUser* = "-oaLodhGbbnzJBACb1kk2Q/UserByScreenName"
  graphUserV2* = "WEoGnYB0EG1yGwamDCF6zg/UserResultByScreenNameQuery"
  graphUserById* = "VN33vKXrPT7p35DgNR27aw/UserResultByIdQuery"
  graphUserTweetsV2* = "6QdSuZ5feXxOadEdXa4XZg/UserWithProfileTweetsQueryV2"
  graphUserTweetsAndRepliesV2* = "BDX77Xzqypdt11-mDfgdpQ/UserWithProfileTweetsAndRepliesQueryV2"
  graphUserTweets* = "oRJs8SLCRNRbQzuZG93_oA/UserTweets"
  graphUserTweetsAndReplies* = "kkaJ0Mf34PZVarrxzLihjg/UserTweetsAndReplies"
  graphUserMedia* = "36oKqyQ7E_9CmtONGjJRsA/UserMedia"
  graphUserMediaV2* = "bp0e_WdXqgNBIwlLukzyYA/MediaTimelineV2"
  graphTweet* = "Y4Erk_-0hObvLpz0Iw3bzA/ConversationTimeline"
  graphTweetDetail* = "YVyS4SfwYW7Uw5qwy0mQCA/TweetDetail"
  graphTweetResult* = "nzme9KiYhfIOrrLrPP_XeQ/TweetResultByIdQuery"
  graphSearchTimeline* = "bshMIjqDk8LTXTq4w91WKw/SearchTimeline"
  graphListById* = "cIUpT1UjuGgl_oWiY7Snhg/ListByRestId"
  graphListBySlug* = "K6wihoTiTrzNzSF8y1aeKQ/ListBySlug"
  graphListMembers* = "fuVHh5-gFn8zDBBxb8wOMA/ListMembers"
  graphListTweets* = "VQf8_XQynI3WzH6xopOMMQ/ListTimeline"

  gqlFeatures* = """{
    "android_ad_formats_media_component_render_overlay_enabled": false,
src/experimental/parser/tid.nim (new file, 8 lines)
@@ -0,0 +1,8 @@
import jsony
import ../types/tid
export TidPair

proc parseTidPairs*(raw: string): seq[TidPair] =
  result = raw.fromJson(seq[TidPair])
  if result.len == 0:
    raise newException(ValueError, "Parsing pairs failed: " & raw)
src/experimental/types/tid.nim (new file, 4 lines)
@@ -0,0 +1,4 @@
type
  TidPair* = object
    animationKey*: string
    verification*: string
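Together, these two new files define the pair format and its parser: parseTidPairs decodes a JSON array into TidPair objects and rejects empty input. A small round-trip sketch, assuming the upstream pair.json uses keys matching the TidPair field names (the real key names may differ) and that it is compiled from within src/:

# Hypothetical round-trip check with made-up pair values.
import experimental/parser/tid

let sample = """[{"animationKey": "abc", "verification": "AAECAw=="}]"""
let pairs = parseTidPairs(sample)
doAssert pairs.len == 1 and pairs[0].animationKey == "abc"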
src/nitter.nim
@@ -6,7 +6,7 @@ from os import getEnv

import jester

import types, config, prefs, formatters, redis_cache, http_pool, auth
import types, config, prefs, formatters, redis_cache, http_pool, auth, apiutils
import views/[general, about]
import routes/[
  preferences, timeline, status, media, search, rss, list, debug,

@@ -37,6 +37,7 @@ setHmacKey(cfg.hmacKey)
setProxyEncoding(cfg.base64Media)
setMaxHttpConns(cfg.httpMaxConns)
setHttpProxy(cfg.proxy, cfg.proxyAuth)
setDisableTid(cfg.disableTid)
initAboutPage(cfg.staticDir)

waitFor initRedisPool(cfg)
src/tid.nim (new file, 62 lines)
@@ -0,0 +1,62 @@
import std/[asyncdispatch, base64, httpclient, random, strutils, sequtils, times]
import nimcrypto
import experimental/parser/tid

randomize()

const defaultKeyword = "obfiowerehiring";
const pairsUrl =
  "https://raw.githubusercontent.com/fa0311/x-client-transaction-id-pair-dict/refs/heads/main/pair.json";

var
  cachedPairs: seq[TidPair] = @[]
  lastCached = 0
  # refresh every hour
  ttlSec = 60 * 60

proc getPair(): Future[TidPair] {.async.} =
  if cachedPairs.len == 0 or int(epochTime()) - lastCached > ttlSec:
    lastCached = int(epochTime())

    let client = newAsyncHttpClient()
    defer: client.close()

    let resp = await client.get(pairsUrl)
    if resp.status == $Http200:
      cachedPairs = parseTidPairs(await resp.body)

  return sample(cachedPairs)

proc encodeSha256(text: string): array[32, byte] =
  let
    data = cast[ptr byte](addr text[0])
    dataLen = uint(len(text))
    digest = sha256.digest(data, dataLen)
  return digest.data

proc encodeBase64[T](data: T): string =
  return encode(data).replace("=", "")

proc decodeBase64(data: string): seq[byte] =
  return cast[seq[byte]](decode(data))

proc genTid*(path: string): Future[string] {.async.} =
  let
    pair = await getPair()

    timeNow = int(epochTime() - 1682924400)
    timeNowBytes = @[
      byte(timeNow and 0xff),
      byte((timeNow shr 8) and 0xff),
      byte((timeNow shr 16) and 0xff),
      byte((timeNow shr 24) and 0xff)
    ]

    data = "GET!" & path & "!" & $timeNow & defaultKeyword & pair.animationKey
    hashBytes = encodeSha256(data)
    keyBytes = decodeBase64(pair.verification)
    bytesArr = keyBytes & timeNowBytes & hashBytes[0 ..< 16] & @[3'u8]
    randomNum = byte(rand(256))
    tid = @[randomNum] & bytesArr.mapIt(it xor randomNum)

  return encodeBase64(tid)
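The module can also be exercised on its own. A minimal usage sketch, not part of the commit, assuming it is compiled inside the Nitter source tree so the tid import and its nimcrypto/jsony dependencies resolve; note that the first call downloads pair.json over HTTPS before deriving the header value:

import std/asyncdispatch
import tid

proc demo() {.async.} =
  # The path mirrors what apiutils passes in: url.path of the cookie-session URL.
  let header = await genTid("/i/api/graphql/YVyS4SfwYW7Uw5qwy0mQCA/TweetDetail")
  echo "x-client-transaction-id: ", header

waitFor demo()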
src/types.nim
@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only
import times, sequtils, options, tables, uri
import times, sequtils, options, tables
import prefs_impl

genPrefsType()
@@ -13,19 +13,13 @@ type
  TimelineKind* {.pure.} = enum
    tweets, replies, media

  Api* {.pure.} = enum
    tweetDetail
    tweetResult
    search
    list
    listBySlug
    listMembers
    listTweets
    userRestId
    userScreenName
    userTweets
    userTweetsAndReplies
    userMedia
  ApiUrl* = object
    endpoint*: string
    params*: seq[(string, string)]

  ApiReq* = object
    oauth*: ApiUrl
    cookie*: ApiUrl

  RateLimit* = object
    limit*: int
@@ -42,7 +36,7 @@ type
    pending*: int
    limited*: bool
    limitedAt*: int
    apis*: Table[Api, RateLimit]
    apis*: Table[string, RateLimit]
    case kind*: SessionKind
    of oauth:
      oauthToken*: string
@@ -51,10 +45,6 @@ type
      authToken*: string
      ct0*: string

  SessionAwareUrl* = object
    oauthUrl*: Uri
    cookieUrl*: Uri

  Error* = enum
    null = 0
    noUserMatches = 17
@@ -285,6 +275,7 @@ type
    enableDebug*: bool
    proxy*: string
    proxyAuth*: string
    disableTid*: bool

    rssCacheTime*: int
    listCacheTime*: int
@@ -11,12 +11,7 @@ card = [
    ['voidtarget/status/1094632512926605312',
     'Basic OBS Studio plugin, written in nim, supporting C++ (C fine too)',
     'Basic OBS Studio plugin, written in nim, supporting C++ (C fine too) - obsplugin.nim',
     'gist.github.com', True],

    ['nim_lang/status/1082989146040340480',
     'Nim in 2018: A short recap',
     'There were several big news in the Nim world in 2018 – two new major releases, partnership with Status, and much more. But let us go chronologically.',
     'nim-lang.org', True]
     'gist.github.com', True]
]

no_thumb = [
@@ -94,7 +94,7 @@ async def login_and_get_cookies(username, password, totp_seed=None, headless=Fal

async def main():
    if len(sys.argv) < 3:
        print('Usage: python3 twitter-auth.py username password [totp_seed] [--append sessions.jsonl] [--headless]')
        print('Usage: python3 create_session_browser.py username password [totp_seed] [--append file.jsonl] [--headless]')
        sys.exit(1)

    username = sys.argv[1]