Compare commits


65 commits

Author SHA1 Message Date
root
e45a61cabf Merge github/master 2023-11-10 14:59:38 +00:00
Zed
eaedd2aee7 Fix ARM64 Dockerfile versions 2023-11-08 16:38:43 +00:00
Zed
5e188647fc Bump Nim in the ARM64 Dockerfile, add nitter user 2023-11-08 14:53:35 +00:00
Zed
e0d9dd0f9c Fix #670 2023-11-08 14:27:22 +00:00
Zed
d17583286a Don't count requests made before reset 2023-11-01 05:44:59 +00:00
Zed
209f453b79 Purge expired accounts after parsing 2023-11-01 05:09:44 +00:00
Zed
e1838e0933 Move CI workflow to buildjet 2023-11-01 05:09:35 +00:00
Zed
623424f516 Fix outdated test 2023-11-01 04:52:44 +00:00
Zed
7b3fcdc622 Fix guest accounts CI setup attempt 4 2023-11-01 04:19:10 +00:00
Zed
1d20bd01cb Remove redundant "active" field from /.health 2023-11-01 04:16:33 +00:00
Zed
58e73a14c5 Fix guest accounts CI setup attempt 3 2023-11-01 04:13:22 +00:00
Zed
b0b335106d Fix missing CI file argument 2023-11-01 04:06:42 +00:00
Zed
006b91c903 Prevent annoying warnings on devel 2023-11-01 04:04:45 +00:00
Zed
33bad37128 Fix guest accounts CI setup attempt 2 2023-11-01 01:25:00 +00:00
Zed
b930a3d5bf Fix guest accounts CI setup 2023-10-31 23:54:11 +00:00
Zed
bd0be724f0 Merge branch 'master' into guest_accounts 2023-10-31 23:47:02 +00:00
Zed
60a82563da Run tests on multiple Nim versions 2023-10-31 23:46:24 +00:00
Zed
b8103cf501 Fix compilation on Nim 1.6.x 2023-10-31 23:02:45 +00:00
Émilien (perso)
b62d73dbd3 nim version min require + update dockerfile arm (#1053) 2023-10-31 22:33:08 +00:00
Zed
4120558649 Replace /.tokens with /.health and /.accounts 2023-10-31 12:04:32 +00:00
Zed
089275826c Bump minimum Nim version 2023-10-31 11:33:24 +00:00
Zed
edad09f4c9 Update nimcrypto and jsony 2023-10-31 08:31:51 +00:00
Zed
32e3469e3a Fix multi-user timelines 2023-10-31 05:53:55 +00:00
LS
735b30c2da fix(nitter): add graphql user search (#1047)
* fix(nitter): add graphql user search

* fix(nitter): rm gitignore 2nd guest_accounts

* fix(nitter): keep query from user search in result. remove personal mods

* fix(nitter): remove useless line gitignore
2023-10-30 12:13:06 +00:00
cf74baef34 Merge github/master 2023-09-21 15:57:50 +02:00
Zed
537af7fd5e Improve Liberapay css for Firefox compatibility 2023-09-19 01:29:41 +00:00
Zed
7d14789910 Improve guest accounts loading, add JSONL support 2023-09-18 18:26:01 +00:00
Zed
7abcb489f4 Increase photo rail cache ttl 2023-09-18 17:15:09 +00:00
16bedef8b1 Merge github/master 2023-09-17 12:43:37 +02:00
c235aa0385 Fix helper script for guest_accounts branch 2023-09-17 12:43:31 +02:00
395469614c Add helper scripts for deployment 2023-09-17 12:41:01 +02:00
Zed
14f9a092d8 Fix crash on missing quote tweet data 2023-09-14 23:35:41 +00:00
Zed
fcd74e8048 Retry rate limited requests with different account 2023-09-02 08:15:58 +02:00
Zed
4250245263 Shorten media proxy error log 2023-09-02 07:28:56 +02:00
Zed
b8fe212e94 Add media proxying error logging 2023-09-01 21:39:02 +02:00
712e299f1d Merge remote-tracking branch 'github/guest_accounts' into guest_accounts 2023-08-31 09:27:40 +02:00
Zed
84dcf49079 Fix negative pending requests bug 2023-08-31 05:07:12 +02:00
Zed
82beb5da8c Add empty oauth token logging 2023-08-31 01:31:27 +02:00
Zed
282ce8b0e9 Add 429 logging 2023-08-31 01:29:54 +02:00
Zed
37b58a5a7e Fix accounts logging 2023-08-30 03:43:49 +02:00
Zed
898b19b92f Improve rate limit handling, minor refactor 2023-08-30 03:10:21 +02:00
Zed
986b91ac73 Handle ProtocolError and BadClientError equally 2023-08-29 23:58:03 +02:00
Zed
4ccf350dc7 Improve .tokens output 2023-08-29 23:45:18 +02:00
Zed
7630f57f17 Fix cards not being displayed 2023-08-26 05:16:38 +02:00
Zed
03794a8d4a Cleanup 2023-08-25 16:32:39 +02:00
Zed
ae9fa02bf5 Switch to TweetDetail for tweets 2023-08-25 16:28:30 +02:00
Zed
88b005c9da Revert "Switch to using typeahead for user search"
This reverts commit a3e11e3272.
2023-08-23 19:31:40 +02:00
78b4ea056b Merge remote-tracking branch 'github/guest_accounts' into guest_accounts 2023-08-23 10:15:55 +02:00
Zed
a3e11e3272 Switch to using typeahead for user search 2023-08-23 10:14:44 +02:00
ddd6d38b4c Merge remote-tracking branch 'github/guest_accounts' into guest_accounts 2023-08-22 14:35:11 +02:00
Zed
45808361af Fix tweetDetail stats 2023-08-22 04:45:49 +02:00
Zed
8df5256c1d Switch back to old user search endpoint 2023-08-22 04:33:14 +02:00
Zed
6e8744943f Tweak /.tokens, add amount of limited accounts 2023-08-22 03:43:18 +02:00
Zed
5c08e6a774 Fix compilation on older versions of Nim 2023-08-22 02:27:44 +02:00
Zed
30bdf3a14e Reduce max concurrent pending requests per account 2023-08-22 01:32:28 +02:00
Zed
12504bcffe Fix compilation error 2023-08-21 18:12:06 +02:00
Zed
c3d9441370 Unify some guest account logs 2023-08-21 14:49:50 +02:00
Zed
51714b5ad2 Add guest accounts variable to GitHub action 2023-08-21 11:25:27 +02:00
f2ded06a53 Add guest accounts to docker container 2023-08-20 17:29:17 +02:00
740be2c7e5 Merge remote-tracking branch 'github/guest_accounts' into guest_accounts 2023-08-20 17:28:23 +02:00
Zed
e8b5cbef7b Add missing limitedAt assignment 2023-08-20 12:31:08 +02:00
Zed
3d8858f0d8 Track rate limits, reset after 24 hours 2023-08-20 11:56:42 +02:00
74ffb1c023 Merge remote-tracking branch 'github/guest_accounts' into guest_accounts 2023-08-19 08:46:57 +02:00
Zed
bbd68e6840 Filter out account limits that already reset 2023-08-19 01:13:36 +02:00
Zed
3572dd7771 Replace tokens with guest accounts, swap endpoints 2023-08-19 00:25:14 +02:00
35 changed files with 496 additions and 549 deletions


@@ -10,25 +10,34 @@ on:
jobs:
test:
runs-on: ubuntu-latest
runs-on: buildjet-2vcpu-ubuntu-2204
strategy:
matrix:
nim:
- "1.6.10"
- "1.6.x"
- "2.0.x"
- "devel"
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Cache nimble
id: cache-nimble
uses: actions/cache@v3
uses: buildjet/cache@v3
with:
path: ~/.nimble
key: nimble-${{ hashFiles('*.nimble') }}
restore-keys: "nimble-"
key: ${{ matrix.nim }}-nimble-${{ hashFiles('*.nimble') }}
restore-keys: |
${{ matrix.nim }}-nimble-
- uses: actions/setup-python@v4
with:
python-version: "3.10"
cache: "pip"
- uses: jiro4989/setup-nim-action@v1
with:
nim-version: "1.x"
nim-version: ${{ matrix.nim }}
repo-token: ${{ secrets.GITHUB_TOKEN }}
- run: nimble build -d:release -Y
- run: pip install seleniumbase
- run: seleniumbase install chromedriver
@@ -37,9 +46,11 @@ jobs:
run: |
sudo apt install libsass-dev -y
cp nitter.example.conf nitter.conf
sed -i 's/enableDebug = false/enableDebug = true/g' nitter.conf
nimble md
nimble scss
echo '${{ secrets.GUEST_ACCOUNTS }}' > ./guest_accounts.jsonl
- name: Run tests
run: |
./nitter &
pytest -n4 tests
pytest -n8 tests

.gitignore

@@ -10,4 +10,5 @@ nitter
/public/css/style.css
/public/md/*.html
nitter.conf
guest_accounts.json*
dump.rdb


@@ -1,7 +1,7 @@
FROM alpine:3.17 as nim
FROM alpine:3.18 as nim
LABEL maintainer="setenforce@protonmail.com"
RUN apk --no-cache add gcc git libc-dev libsass-dev "nim=1.6.8-r0" nimble pcre
RUN apk --no-cache add libsass-dev pcre gcc git libc-dev "nim=1.6.14-r0" "nimble=0.13.1-r2"
WORKDIR /src/nitter
@@ -13,11 +13,13 @@ RUN nimble build -d:danger -d:lto -d:strip \
&& nimble scss \
&& nimble md
FROM alpine:3.17
FROM alpine:3.18
WORKDIR /src/
RUN apk --no-cache add ca-certificates pcre openssl1.1-compat
RUN apk --no-cache add pcre ca-certificates openssl1.1-compat
COPY --from=nim /src/nitter/nitter ./
COPY --from=nim /src/nitter/nitter.example.conf ./nitter.conf
COPY --from=nim /src/nitter/public ./public
EXPOSE 8080
RUN adduser -h /src/ -D -s /bin/sh nitter
USER nitter
CMD ./nitter


@@ -7,12 +7,7 @@
# disable annoying warnings
warning("GcUnsafe2", off)
warning("HoleEnumConv", off)
hint("XDeclaredButNotUsed", off)
hint("XCannotRaiseY", off)
hint("User", off)
const
nimVersion = (major: NimMajor, minor: NimMinor, patch: NimPatch)
when nimVersion >= (1, 6, 0):
warning("HoleEnumConv", off)


@@ -4,12 +4,13 @@ services:
nitter:
image: git.nolog.cz/nolog.cz/nitter:latest
#build: .
build: .
container_name: nitter
ports:
- "8080:8080" # Replace with "8080:8080" if you don't use a reverse proxy
volumes:
- ./nitter.conf:/src/nitter.conf:Z,ro
- ./guest_accounts.json:/src/guest_accounts.json:Z,ro
depends_on:
- nitter-redis
restart: unless-stopped


@@ -23,7 +23,7 @@ redisMaxConnections = 30
hmacKey = "secretkey" # random key for cryptographic signing of video urls
base64Media = false # use base64 encoding for proxied media urls
enableRSS = true # set this to false to disable RSS feeds
enableDebug = false # enable request logs and debug endpoints (/.tokens)
enableDebug = false # enable request logs and debug endpoints (/.accounts)
proxy = "" # http/https url, SOCKS proxies are not supported
proxyAuth = ""
tokenCount = 10


@@ -10,11 +10,11 @@ bin = @["nitter"]
# Dependencies
requires "nim >= 1.4.8"
requires "nim >= 1.6.10"
requires "jester#baca3f"
requires "karax#5cf360c"
requires "sass#7dfdd03"
requires "nimcrypto#4014ef9"
requires "nimcrypto#a079df9"
requires "markdown#158efe3"
requires "packedjson#9e6fbb6"
requires "supersnappy#6c94198"
@@ -22,8 +22,8 @@ requires "redpool#8b7c1db"
requires "https://github.com/zedeus/redis#d0a0e6f"
requires "zippy#ca5989a"
requires "flatty#e668085"
requires "jsony#ea811be"
requires "jsony#1de1f08"
requires "oauth#b8c163b"
# Tasks

push-image.sh (new file)

@@ -0,0 +1,9 @@
#!/bin/bash
set -e
TODAY=$(date +%Y-%m-%d)
docker-compose push
docker image tag git.nolog.cz/nolog.cz/nitter:latest git.nolog.cz/nolog.cz/nitter:$TODAY
docker push git.nolog.cz/nolog.cz/nitter:$TODAY


@@ -33,23 +33,6 @@ proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Profi
js = await fetch(url ? params, apiId)
result = parseGraphTimeline(js, "user", after)
# proc getTimeline*(id: string; after=""; replies=false): Future[Profile] {.async.} =
# if id.len == 0: return
# let
# ps = genParams({"userId": id, "include_tweet_replies": $replies}, after)
# url = oldUserTweets / (id & ".json") ? ps
# result = parseTimeline(await fetch(url, Api.timeline), after)
proc getUserTimeline*(id: string; after=""): Future[Profile] {.async.} =
var ps = genParams({"id": id})
if after.len > 0:
ps.add ("down_cursor", after)
let
url = legacyUserTweets ? ps
js = await fetch(url, Api.userTimeline)
result = parseUserTimeline(js, after)
proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} =
if id.len == 0: return
let
@@ -112,10 +95,10 @@ proc getTweet*(id: string; after=""): Future[Conversation] {.async.} =
if after.len > 0:
result.replies = await getReplies(id, after)
proc getGraphSearch*(query: Query; after=""): Future[Profile] {.async.} =
proc getGraphTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
let q = genQueryParam(query)
if q.len == 0 or q == emptyQuery:
return Profile(tweets: Timeline(query: query, beginning: true))
return Timeline(query: query, beginning: true)
var
variables = %*{
@@ -129,44 +112,29 @@ proc getGraphSearch*(query: Query; after=""): Future[Profile] {.async.} =
if after.len > 0:
variables["cursor"] = % after
let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
result = Profile(tweets: parseGraphSearch(await fetch(url, Api.search), after))
result.tweets.query = query
proc getTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
var q = genQueryParam(query)
if q.len == 0 or q == emptyQuery:
return Timeline(query: query, beginning: true)
if after.len > 0:
q &= " max_id:" & after
let url = tweetSearch ? genParams({
"q": q ,
"modules": "status",
"result_type": "recent",
})
result = parseTweetSearch(await fetch(url, Api.search), after)
result = parseGraphSearch[Tweets](await fetch(url, Api.search), after)
result.query = query
proc getUserSearch*(query: Query; page="1"): Future[Result[User]] {.async.} =
proc getGraphUserSearch*(query: Query; after=""): Future[Result[User]] {.async.} =
if query.text.len == 0:
return Result[User](query: query, beginning: true)
var url = userSearch ? {
"q": query.text,
"skip_status": "1",
"count": "20",
"page": page
}
var
variables = %*{
"rawQuery": query.text,
"count": 20,
"product": "People",
"withDownvotePerspective": false,
"withReactionsMetadata": false,
"withReactionsPerspective": false
}
if after.len > 0:
variables["cursor"] = % after
result.beginning = false
result = parseUsers(await fetchRaw(url, Api.userSearch))
let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
result = parseGraphSearch[User](await fetch(url, Api.search), after)
result.query = query
if page.len == 0:
result.bottom = "2"
elif page.allCharsInSet(Digits):
result.bottom = $(parseInt(page) + 1)
proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
if name.len == 0: return


@@ -1,7 +1,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
import httpclient, asyncdispatch, options, strutils, uri
import jsony, packedjson, zippy
import types, tokens, consts, parserutils, http_pool
import httpclient, asyncdispatch, options, strutils, uri, times, math, tables
import jsony, packedjson, zippy, oauth1
import types, auth, consts, parserutils, http_pool
import experimental/types/common
const
@@ -29,12 +29,30 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
else:
result &= ("cursor", cursor)
proc genHeaders*(token: Token = nil): HttpHeaders =
proc getOauthHeader(url, oauthToken, oauthTokenSecret: string): string =
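## builds a per-request OAuth 1.0a authorization header signed with
## HMAC-SHA1; commas and plus signs in the URL are re-encoded first so
## the signature base string stays RFC 3986-compliant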
let
encodedUrl = url.replace(",", "%2C").replace("+", "%20")
params = OAuth1Parameters(
consumerKey: consumerKey,
signatureMethod: "HMAC-SHA1",
timestamp: $int(round(epochTime())),
nonce: "0",
isIncludeVersionToHeader: true,
token: oauthToken
)
signature = getSignature(HttpGet, encodedUrl, "", params, consumerSecret, oauthTokenSecret)
params.signature = percentEncode(signature)
return getOauth1RequestHeader(params)["authorization"]
proc genHeaders*(url, oauthToken, oauthTokenSecret: string): HttpHeaders =
let header = getOauthHeader(url, oauthToken, oauthTokenSecret)
result = newHttpHeaders({
"connection": "keep-alive",
"authorization": auth,
"authorization": header,
"content-type": "application/json",
"x-guest-token": if token == nil: "" else: token.tok,
"x-twitter-active-user": "yes",
"authority": "api.twitter.com",
"accept-encoding": "gzip",
@@ -43,24 +61,18 @@ proc genHeaders*(token: Token = nil): HttpHeaders =
"DNT": "1"
})
template updateToken() =
if resp.headers.hasKey(rlRemaining):
let
remaining = parseInt(resp.headers[rlRemaining])
reset = parseInt(resp.headers[rlReset])
token.setRateLimit(api, remaining, reset)
template fetchImpl(result, fetchBody) {.dirty.} =
once:
pool = HttpPool()
var token = await getToken(api)
if token.tok.len == 0:
var account = await getGuestAccount(api)
if account.oauthToken.len == 0:
echo "[accounts] Empty oauth token, account: ", account.id
raise rateLimitError()
try:
var resp: AsyncResponse
pool.use(genHeaders(token)):
pool.use(genHeaders($url, account.oauthToken, account.oauthSecret)):
template getContent =
resp = await c.get($url)
result = await resp.body
@@ -71,57 +83,75 @@ template fetchImpl(result, fetchBody) {.dirty.} =
badClient = true
raise newException(BadClientError, "Bad client")
if resp.headers.hasKey(rlRemaining):
let
remaining = parseInt(resp.headers[rlRemaining])
reset = parseInt(resp.headers[rlReset])
account.setRateLimit(api, remaining, reset)
if result.len > 0:
if resp.headers.getOrDefault("content-encoding") == "gzip":
result = uncompress(result, dfGzip)
else:
echo "non-gzip body, url: ", url, ", body: ", result
if result.startsWith("{\"errors"):
let errors = result.fromJson(Errors)
if errors in {expiredToken, badToken}:
echo "fetch error: ", errors
invalidate(account)
raise rateLimitError()
elif errors in {rateLimited}:
# rate limit hit, resets after 24 hours
setLimited(account, api)
raise rateLimitError()
elif result.startsWith("429 Too Many Requests"):
echo "[accounts] 429 error, API: ", api, ", account: ", account.id
account.apis[api].remaining = 0
# rate limit hit, resets after the 15 minute window
raise rateLimitError()
fetchBody
release(token, used=true)
if resp.status == $Http400:
raise newException(InternalError, $url)
except InternalError as e:
raise e
except BadClientError as e:
release(token, used=true)
raise e
except OSError as e:
raise e
except Exception as e:
echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
if "length" notin e.msg and "descriptor" notin e.msg:
release(token, invalid=true)
let id = if account.isNil: "null" else: $account.id
echo "error: ", e.name, ", msg: ", e.msg, ", accountId: ", id, ", url: ", url
raise rateLimitError()
finally:
release(account)
template retry(bod) =
try:
bod
except RateLimitError:
echo "[accounts] Rate limited, retrying ", api, " request..."
bod
proc fetch*(url: Uri; api: Api): Future[JsonNode] {.async.} =
var body: string
fetchImpl body:
if body.startsWith('{') or body.startsWith('['):
result = parseJson(body)
else:
echo resp.status, ": ", body, " --- url: ", url
result = newJNull()
retry:
var body: string
fetchImpl body:
if body.startsWith('{') or body.startsWith('['):
result = parseJson(body)
else:
echo resp.status, ": ", body, " --- url: ", url
result = newJNull()
updateToken()
let error = result.getError
if error in {invalidToken, badToken}:
echo "fetch error: ", result.getError
release(token, invalid=true)
raise rateLimitError()
let error = result.getError
if error in {expiredToken, badToken}:
echo "fetchBody error: ", error
invalidate(account)
raise rateLimitError()
proc fetchRaw*(url: Uri; api: Api): Future[string] {.async.} =
fetchImpl result:
if not (result.startsWith('{') or result.startsWith('[')):
echo resp.status, ": ", result, " --- url: ", url
result.setLen(0)
updateToken()
if result.startsWith("{\"errors"):
let errors = result.fromJson(Errors)
if errors in {invalidToken, badToken}:
echo "fetch error: ", errors
release(token, invalid=true)
raise rateLimitError()
retry:
fetchImpl result:
if not (result.startsWith('{') or result.startsWith('[')):
echo resp.status, ": ", result, " --- url: ", url
result.setLen(0)
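
For reference, the authorization header produced by getOauthHeader above takes the standard OAuth 1.0a shape. In this sketch only the consumer key (from src/consts.nim below) and the constant "0" nonce are real; the token, timestamp, and signature values are placeholders:

OAuth oauth_consumer_key="3nVuSoBZnx6U4vzUxf5w", oauth_nonce="0", oauth_signature="...", oauth_signature_method="HMAC-SHA1", oauth_timestamp="1693526400", oauth_token="1234567890-AbCdEfGh", oauth_version="1.0"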

src/auth.nim (new file)

@@ -0,0 +1,209 @@
# SPDX-License-Identifier: AGPL-3.0-only
import std/[asyncdispatch, times, json, random, sequtils, strutils, tables, packedsets, os]
import types
import experimental/parser/guestaccount
# max requests at a time per account to avoid race conditions
const
maxConcurrentReqs = 2
dayInSeconds = 24 * 60 * 60
apiMaxReqs: Table[Api, int] = {
Api.search: 50,
Api.tweetDetail: 150,
Api.photoRail: 180,
Api.userTweets: 500,
Api.userTweetsAndReplies: 500,
Api.userMedia: 500,
Api.userRestId: 500,
Api.userScreenName: 500,
Api.tweetResult: 500,
Api.list: 500,
Api.listTweets: 500,
Api.listMembers: 500,
Api.listBySlug: 500
}.toTable
var
accountPool: seq[GuestAccount]
enableLogging = false
template log(str: varargs[string, `$`]) =
if enableLogging: echo "[accounts] ", str.join("")
proc snowflakeToEpoch(flake: int64): int64 =
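## a snowflake id encodes the milliseconds elapsed since the Twitter
## epoch (2010-11-04) in its upper bits; adding 1288834974657 ms
## converts that offset to Unix time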
int64(((flake shr 22) + 1288834974657) div 1000)
proc hasExpired(account: GuestAccount): bool =
let
created = snowflakeToEpoch(account.id)
now = epochTime().int64
daysOld = int(now - created) div dayInSeconds
return daysOld > 30
proc getAccountPoolHealth*(): JsonNode =
let now = epochTime().int
var
totalReqs = 0
limited: PackedSet[int64]
reqsPerApi: Table[string, int]
oldest = now.int64
newest = 0'i64
average = 0'i64
for account in accountPool:
let created = snowflakeToEpoch(account.id)
if created > newest:
newest = created
if created < oldest:
oldest = created
average += created
for api in account.apis.keys:
let
apiStatus = account.apis[api]
reqs = apiMaxReqs[api] - apiStatus.remaining
if apiStatus.limited:
limited.incl account.id
# no requests made with this account and endpoint since the limit reset
if apiStatus.reset < now:
continue
reqsPerApi.mgetOrPut($api, 0).inc reqs
totalReqs.inc reqs
if accountPool.len > 0:
average = average div accountPool.len
else:
oldest = 0
average = 0
return %*{
"accounts": %*{
"total": accountPool.len,
"limited": limited.card,
"oldest": $fromUnix(oldest),
"newest": $fromUnix(newest),
"average": $fromUnix(average)
},
"requests": %*{
"total": totalReqs,
"apis": reqsPerApi
}
}
proc getAccountPoolDebug*(): JsonNode =
let now = epochTime().int
var list = newJObject()
for account in accountPool:
let accountJson = %*{
"apis": newJObject(),
"pending": account.pending,
}
for api in account.apis.keys:
let
apiStatus = account.apis[api]
obj = %*{}
if apiStatus.reset > now.int:
obj["remaining"] = %apiStatus.remaining
if "remaining" notin obj and not apiStatus.limited:
continue
if apiStatus.limited:
obj["limited"] = %true
accountJson{"apis", $api} = obj
list[$account.id] = accountJson
return %list
proc rateLimitError*(): ref RateLimitError =
newException(RateLimitError, "rate limited")
proc isLimited(account: GuestAccount; api: Api): bool =
if account.isNil:
return true
if api in account.apis:
let limit = account.apis[api]
if limit.limited and (epochTime().int - limit.limitedAt) > dayInSeconds:
account.apis[api].limited = false
log "resetting limit, api: ", api, ", id: ", account.id
return limit.limited or (limit.remaining <= 10 and limit.reset > epochTime().int)
else:
return false
proc isReady(account: GuestAccount; api: Api): bool =
not (account.isNil or account.pending > maxConcurrentReqs or account.isLimited(api))
proc invalidate*(account: var GuestAccount) =
if account.isNil: return
log "invalidating expired account: ", account.id
# TODO: This isn't sufficient, but it works for now
let idx = accountPool.find(account)
if idx > -1: accountPool.delete(idx)
account = nil
proc release*(account: GuestAccount) =
if account.isNil: return
dec account.pending
proc getGuestAccount*(api: Api): Future[GuestAccount] {.async.} =
for i in 0 ..< accountPool.len:
if result.isReady(api): break
result = accountPool.sample()
if not result.isNil and result.isReady(api):
inc result.pending
else:
log "no accounts available for API: ", api
raise rateLimitError()
proc setLimited*(account: GuestAccount; api: Api) =
account.apis[api].limited = true
account.apis[api].limitedAt = epochTime().int
log "rate limited, api: ", api, ", reqs left: ", account.apis[api].remaining, ", id: ", account.id
proc setRateLimit*(account: GuestAccount; api: Api; remaining, reset: int) =
# avoid undefined behavior in race conditions
if api in account.apis:
let limit = account.apis[api]
if limit.reset >= reset and limit.remaining < remaining:
return
if limit.reset == reset and limit.remaining >= remaining:
account.apis[api].remaining = remaining
return
account.apis[api] = RateLimit(remaining: remaining, reset: reset)
proc initAccountPool*(cfg: Config; path: string) =
enableLogging = cfg.enableDebug
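# prefer a sibling .jsonl file when the configured path ends in .json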
let jsonlPath = if path.endsWith(".json"): (path & 'l') else: path
if fileExists(jsonlPath):
log "Parsing JSONL guest accounts file: ", jsonlPath
for line in jsonlPath.lines:
accountPool.add parseGuestAccount(line)
elif fileExists(path):
log "Parsing JSON guest accounts file: ", path
accountPool = parseGuestAccounts(path)
else:
echo "[accounts] ERROR: ", path, " not found. This file is required to authenticate API requests."
quit 1
let accountsPrePurge = accountPool.len
accountPool.keepItIf(not it.hasExpired)
log "Successfully added ", accountPool.len, " valid accounts."
if accountsPrePurge > accountPool.len:
log "Purged ", accountsPrePurge - accountPool.len, " expired accounts."


@@ -2,17 +2,13 @@
import uri, sequtils, strutils
const
auth* = "Bearer AAAAAAAAAAAAAAAAAAAAAFQODgEAAAAAVHTp76lzh3rFzcHbmHVvQxYYpTw%3DckAlMINMjmCwxUcaXbAN4XqJVdgMJaHqNOFgPMK0zN1qLqLQCF"
consumerKey* = "3nVuSoBZnx6U4vzUxf5w"
consumerSecret* = "Bcs59EFbbsdF6Sl9Ng71smgStWEGwXXKSjYvPVt7qys"
api = parseUri("https://api.twitter.com")
activate* = $(api / "1.1/guest/activate.json")
legacyUserTweets* = api / "1.1/timeline/user.json"
photoRail* = api / "1.1/statuses/media_timeline.json"
userSearch* = api / "1.1/users/search.json"
tweetSearch* = api / "1.1/search/universal.json"
# oldUserTweets* = api / "2/timeline/profile"
graphql = api / "graphql"
graphUser* = graphql / "u7wQyGi6oExe8_TRWGMq4Q/UserResultByScreenNameQuery"
@@ -20,7 +16,7 @@ const
graphUserTweets* = graphql / "3JNH4e9dq1BifLxAa3UMWg/UserWithProfileTweetsQueryV2"
graphUserTweetsAndReplies* = graphql / "8IS8MaO-2EN6GZZZb8jF0g/UserWithProfileTweetsAndRepliesQueryV2"
graphUserMedia* = graphql / "PDfFf8hGeJvUCiTyWtw4wQ/MediaTimelineV2"
graphTweet* = graphql / "83h5UyHZ9wEKBVzALX8R_g/ConversationTimelineV2"
graphTweet* = graphql / "q94uRCEn65LZThakYcPT6g/TweetDetail"
graphTweetResult* = graphql / "sITyJdhRPpvpEjg4waUmTA/TweetResultByIdQuery"
graphSearchTimeline* = graphql / "gkjsKepM6gl_HmFWoWKfgg/SearchTimeline"
graphListById* = graphql / "iTpgCtbdxrsJfyx0cFjHqg/ListByRestId"
@@ -38,6 +34,7 @@ const
"include_user_entities": "1",
"include_ext_reply_count": "1",
"include_ext_is_blue_verified": "1",
# "include_ext_verified_type": "1",
"include_ext_media_color": "0",
"cards_platform": "Web-13",
"tweet_mode": "extended",
@@ -91,8 +88,12 @@ const
tweetVariables* = """{
"focalTweetId": "$1",
$2
"includeHasBirdwatchNotes": false
}"""
"includeHasBirdwatchNotes": false,
"includePromotedContent": false,
"withBirdwatchNotes": false,
"withVoice": false,
"withV2Timeline": true
}""".replace(" ", "").replace("\n", "")
# oldUserTweetsVariables* = """{
# "userId": "$1", $2


@@ -0,0 +1,21 @@
import std/strutils
import jsony
import ../types/guestaccount
from ../../types import GuestAccount
proc toGuestAccount(account: RawAccount): GuestAccount =
let id = account.oauthToken[0 ..< account.oauthToken.find('-')]
result = GuestAccount(
id: parseBiggestInt(id),
oauthToken: account.oauthToken,
oauthSecret: account.oauthTokenSecret
)
proc parseGuestAccount*(raw: string): GuestAccount =
let rawAccount = raw.fromJson(RawAccount)
result = rawAccount.toGuestAccount
proc parseGuestAccounts*(path: string): seq[GuestAccount] =
let rawAccounts = readFile(path).fromJson(seq[RawAccount])
for account in rawAccounts:
result.add account.toGuestAccount
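
Going by RawAccount and toGuestAccount above, a single guest_accounts.jsonl line would look roughly like the sketch below; both token values are placeholders, and the snake_case key names are an assumption based on jsony's tolerant key matching:

{"oauth_token": "1234567890123456789-AbCdEfGhIjKlMnOpQrStUvWx", "oauth_token_secret": "Zy0Xw9Vu8Ts7Rq6Po5Nm4Lk3Ji2Hg"}

The digits before the '-' double as the account's snowflake id, which toGuestAccount extracts for age checks and logging.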


@@ -56,7 +56,7 @@ proc toUser*(raw: RawUser): User =
tweets: raw.statusesCount,
likes: raw.favouritesCount,
media: raw.mediaCount,
verified: raw.verified,
verified: raw.verified or raw.extIsBlueVerified,
protected: raw.protected,
joinDate: parseTwitterDate(raw.createdAt),
banner: getBanner(raw),


@@ -0,0 +1,4 @@
type
RawAccount* = object
oauthToken*: string
oauthTokenSecret*: string


@@ -16,6 +16,7 @@ type
statusesCount*: int
mediaCount*: int
verified*: bool
extIsBlueVerified*: bool
protected*: bool
profileLinkColor*: string
profileBannerUrl*: string


@@ -39,11 +39,8 @@ template use*(pool: HttpPool; heads: HttpHeaders; body: untyped): untyped =
try:
body
except ProtocolError:
# Twitter closed the connection, retry
body
except BadClientError:
# Twitter returned 503, we need a new client
except BadClientError, ProtocolError:
# Twitter returned 503 or closed the connection, we need a new client
pool.release(c, true)
badClient = false
c = pool.acquire(heads)


@@ -6,7 +6,7 @@ from os import getEnv
import jester
import types, config, prefs, formatters, redis_cache, http_pool, tokens
import types, config, prefs, formatters, redis_cache, http_pool, auth
import views/[general, about]
import routes/[
preferences, timeline, status, media, search, rss, list, debug,
@@ -15,8 +15,13 @@ import routes/[
const instancesUrl = "https://github.com/zedeus/nitter/wiki/Instances"
const issuesUrl = "https://github.com/zedeus/nitter/issues"
let configPath = getEnv("NITTER_CONF_FILE", "./nitter.conf")
let (cfg, fullCfg) = getConfig(configPath)
let
configPath = getEnv("NITTER_CONF_FILE", "./nitter.conf")
(cfg, fullCfg) = getConfig(configPath)
accountsPath = getEnv("NITTER_ACCOUNTS_FILE", "./guest_accounts.json")
initAccountPool(cfg, accountsPath)
if not cfg.enableDebug:
# Silence Jester's query warning
@@ -38,8 +43,6 @@ waitFor initRedisPool(cfg)
stdout.write &"Connected to Redis at {cfg.redisHost}:{cfg.redisPort}\n"
stdout.flushFile
asyncCheck initTokenPool(cfg)
createUnsupportedRouter(cfg)
createResolverRouter(cfg)
createPrefRouter(cfg)


@@ -1,10 +1,10 @@
# SPDX-License-Identifier: AGPL-3.0-only
import strutils, options, times, math, tables
import strutils, options, times, math
import packedjson, packedjson/deserialiser
import types, parserutils, utils
import experimental/parser/unifiedcard
proc parseGraphTweet(js: JsonNode): Tweet
proc parseGraphTweet(js: JsonNode; isLegacy=false): Tweet
proc parseUser(js: JsonNode; id=""): User =
if js.isNull: return
@@ -29,7 +29,9 @@ proc parseUser(js: JsonNode; id=""): User =
result.expandUserEntities(js)
proc parseGraphUser(js: JsonNode): User =
let user = ? js{"user_result", "result"}
var user = js{"user_result", "result"}
if user.isNull:
user = ? js{"user_results", "result"}
result = parseUser(user{"legacy"})
if "is_blue_verified" in user:
@@ -287,169 +289,6 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
result.text.removeSuffix(" Learn more.")
result.available = false
proc parseLegacyTweet(js: JsonNode): Tweet =
result = parseTweet(js, js{"card"})
if not result.isNil and result.available:
result.user = parseUser(js{"user"})
if result.quote.isSome:
result.quote = some parseLegacyTweet(js{"quoted_status"})
proc parseTweetSearch*(js: JsonNode; after=""): Timeline =
result.beginning = after.len == 0
if js.kind == JNull or "modules" notin js or js{"modules"}.len == 0:
return
for item in js{"modules"}:
with tweet, item{"status", "data"}:
let parsed = parseLegacyTweet(tweet)
if parsed.retweet.isSome:
parsed.retweet = some parseLegacyTweet(tweet{"retweeted_status"})
result.content.add @[parsed]
if result.content.len > 0:
result.bottom = $(result.content[^1][0].id - 1)
proc parseUserTimelineTweet(tweet: JsonNode; users: TableRef[string, User]): Tweet =
result = parseTweet(tweet, tweet{"card"})
if result.isNil or not result.available:
return
with user, tweet{"user"}:
let userId = user{"id_str"}.getStr
if user{"ext_is_blue_verified"}.getBool(false):
users[userId].verified = users[userId].verified or true
result.user = users[userId]
proc parseUserTimeline*(js: JsonNode; after=""): Profile =
result = Profile(tweets: Timeline(beginning: after.len == 0))
if js.kind == JNull or "response" notin js or "twitter_objects" notin js:
return
var users = newTable[string, User]()
for userId, user in js{"twitter_objects", "users"}:
users[userId] = parseUser(user)
for entity in js{"response", "timeline"}:
let
tweetId = entity{"tweet", "id"}.getId
isPinned = entity{"tweet", "is_pinned"}.getBool(false)
with tweet, js{"twitter_objects", "tweets", $tweetId}:
var parsed = parseUserTimelineTweet(tweet, users)
if not parsed.isNil and parsed.available:
if parsed.quote.isSome:
parsed.quote = some parseUserTimelineTweet(tweet{"quoted_status"}, users)
if parsed.retweet.isSome:
let retweet = parseUserTimelineTweet(tweet{"retweeted_status"}, users)
if retweet.quote.isSome:
retweet.quote = some parseUserTimelineTweet(tweet{"retweeted_status", "quoted_status"}, users)
parsed.retweet = some retweet
if isPinned:
parsed.pinned = true
result.pinned = some parsed
else:
result.tweets.content.add parsed
result.tweets.bottom = js{"response", "cursor", "bottom"}.getStr
# proc finalizeTweet(global: GlobalObjects; id: string): Tweet =
# let intId = if id.len > 0: parseBiggestInt(id) else: 0
# result = global.tweets.getOrDefault(id, Tweet(id: intId))
# if result.quote.isSome:
# let quote = get(result.quote).id
# if $quote in global.tweets:
# result.quote = some global.tweets[$quote]
# else:
# result.quote = some Tweet()
# if result.retweet.isSome:
# let rt = get(result.retweet).id
# if $rt in global.tweets:
# result.retweet = some finalizeTweet(global, $rt)
# else:
# result.retweet = some Tweet()
# proc parsePin(js: JsonNode; global: GlobalObjects): Tweet =
# let pin = js{"pinEntry", "entry", "entryId"}.getStr
# if pin.len == 0: return
# let id = pin.getId
# if id notin global.tweets: return
# global.tweets[id].pinned = true
# return finalizeTweet(global, id)
# proc parseGlobalObjects(js: JsonNode): GlobalObjects =
# result = GlobalObjects()
# let
# tweets = ? js{"globalObjects", "tweets"}
# users = ? js{"globalObjects", "users"}
# for k, v in users:
# result.users[k] = parseUser(v, k)
# for k, v in tweets:
# var tweet = parseTweet(v, v{"card"})
# if tweet.user.id in result.users:
# tweet.user = result.users[tweet.user.id]
# result.tweets[k] = tweet
# proc parseInstructions(res: var Profile; global: GlobalObjects; js: JsonNode) =
# if js.kind != JArray or js.len == 0:
# return
# for i in js:
# if res.tweets.beginning and i{"pinEntry"}.notNull:
# with pin, parsePin(i, global):
# res.pinned = some pin
# with r, i{"replaceEntry", "entry"}:
# if "top" in r{"entryId"}.getStr:
# res.tweets.top = r.getCursor
# elif "bottom" in r{"entryId"}.getStr:
# res.tweets.bottom = r.getCursor
# proc parseTimeline*(js: JsonNode; after=""): Profile =
# result = Profile(tweets: Timeline(beginning: after.len == 0))
# let global = parseGlobalObjects(? js)
# let instructions = ? js{"timeline", "instructions"}
# if instructions.len == 0: return
# result.parseInstructions(global, instructions)
# var entries: JsonNode
# for i in instructions:
# if "addEntries" in i:
# entries = i{"addEntries", "entries"}
# for e in ? entries:
# let entry = e{"entryId"}.getStr
# if "tweet" in entry or entry.startsWith("sq-I-t") or "tombstone" in entry:
# let tweet = finalizeTweet(global, e.getEntryId)
# if not tweet.available: continue
# result.tweets.content.add tweet
# elif "cursor-top" in entry:
# result.tweets.top = e.getCursor
# elif "cursor-bottom" in entry:
# result.tweets.bottom = e.getCursor
# elif entry.startsWith("sq-cursor"):
# with cursor, e{"content", "operation", "cursor"}:
# if cursor{"cursorType"}.getStr == "Bottom":
# result.tweets.bottom = cursor{"value"}.getStr
# else:
# result.tweets.top = cursor{"value"}.getStr
proc parsePhotoRail*(js: JsonNode): PhotoRail =
with error, js{"error"}:
if error.getStr == "Not authorized.":
@@ -467,7 +306,7 @@ proc parsePhotoRail*(js: JsonNode): PhotoRail =
if url.len == 0: continue
result.add GalleryPhoto(url: url, tweetId: $t.id)
proc parseGraphTweet(js: JsonNode): Tweet =
proc parseGraphTweet(js: JsonNode; isLegacy=false): Tweet =
if js.kind == JNull:
return Tweet()
@@ -483,9 +322,14 @@ proc parseGraphTweet(js: JsonNode): Tweet =
of "TweetPreviewDisplay":
return Tweet(text: "You're unable to view this Tweet because it's only available to the Subscribers of the account owner.")
of "TweetWithVisibilityResults":
return parseGraphTweet(js{"tweet"})
return parseGraphTweet(js{"tweet"}, isLegacy)
else:
discard
var jsCard = copy(js{"tweet_card", "legacy"})
if not js.hasKey("legacy"):
return Tweet()
var jsCard = copy(js{if isLegacy: "card" else: "tweet_card", "legacy"})
if jsCard.kind != JNull:
var values = newJObject()
for val in jsCard["binding_values"]:
@@ -500,10 +344,9 @@ proc parseGraphTweet(js: JsonNode): Tweet =
result.expandNoteTweetEntities(noteTweet)
if result.quote.isSome:
result.quote = some(parseGraphTweet(js{"quoted_status_result", "result"}))
result.quote = some(parseGraphTweet(js{"quoted_status_result", "result"}, isLegacy))
proc parseGraphThread(js: JsonNode): tuple[thread: Chain; self: bool] =
let thread = js{"content", "items"}
for t in js{"content", "items"}:
let entryId = t{"entryId"}.getStr
if "cursor-showmore" in entryId:
@@ -511,28 +354,33 @@ proc parseGraphThread(js: JsonNode): tuple[thread: Chain; self: bool] =
result.thread.cursor = cursor.getStr
result.thread.hasMore = true
elif "tweet" in entryId:
let tweet = parseGraphTweet(t{"item", "content", "tweetResult", "result"})
result.thread.content.add tweet
let
isLegacy = t{"item"}.hasKey("itemContent")
(contentKey, resultKey) = if isLegacy: ("itemContent", "tweet_results")
else: ("content", "tweetResult")
if t{"item", "content", "tweetDisplayType"}.getStr == "SelfThread":
result.self = true
with content, t{"item", contentKey}:
result.thread.content.add parseGraphTweet(content{resultKey, "result"}, isLegacy)
if content{"tweetDisplayType"}.getStr == "SelfThread":
result.self = true
proc parseGraphTweetResult*(js: JsonNode): Tweet =
with tweet, js{"data", "tweet_result", "result"}:
result = parseGraphTweet(tweet)
result = parseGraphTweet(tweet, false)
proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
result = Conversation(replies: Result[Chain](beginning: true))
let instructions = ? js{"data", "timeline_response", "instructions"}
let instructions = ? js{"data", "threaded_conversation_with_injections_v2", "instructions"}
if instructions.len == 0:
return
for e in instructions[0]{"entries"}:
let entryId = e{"entryId"}.getStr
if entryId.startsWith("tweet"):
with tweetResult, e{"content", "content", "tweetResult", "result"}:
let tweet = parseGraphTweet(tweetResult)
with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
let tweet = parseGraphTweet(tweetResult, true)
if not tweet.available:
tweet.id = parseBiggestInt(entryId.getId())
@@ -546,7 +394,7 @@ proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
let tweet = Tweet(
id: parseBiggestInt(id),
available: false,
text: e{"content", "content", "tombstoneInfo", "richText"}.getTombstone
text: e{"content", "itemContent", "tombstoneInfo", "richText"}.getTombstone
)
if id == tweetId:
@@ -560,7 +408,7 @@ proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
else:
result.replies.content.add thread
elif entryId.startsWith("cursor-bottom"):
result.replies.bottom = e{"content", "content", "value"}.getStr
result.replies.bottom = e{"content", "itemContent", "value"}.getStr
proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Profile =
result = Profile(tweets: Timeline(beginning: after.len == 0))
@@ -578,7 +426,7 @@ proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Profile =
let entryId = e{"entryId"}.getStr
if entryId.startsWith("tweet"):
with tweetResult, e{"content", "content", "tweetResult", "result"}:
let tweet = parseGraphTweet(tweetResult)
let tweet = parseGraphTweet(tweetResult, false)
if not tweet.available:
tweet.id = parseBiggestInt(entryId.getId())
result.tweets.content.add tweet
@@ -589,7 +437,7 @@ proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Profile =
result.tweets.bottom = e{"content", "value"}.getStr
if after.len == 0 and i{"__typename"}.getStr == "TimelinePinEntry":
with tweetResult, i{"entry", "content", "content", "tweetResult", "result"}:
let tweet = parseGraphTweet(tweetResult)
let tweet = parseGraphTweet(tweetResult, false)
tweet.pinned = true
if not tweet.available and tweet.tombstone.len == 0:
let entryId = i{"entry", "entryId"}.getEntryId
@@ -597,8 +445,8 @@ proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Profile =
tweet.id = parseBiggestInt(entryId)
result.pinned = some tweet
proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
result = Timeline(beginning: after.len == 0)
proc parseGraphSearch*[T: User | Tweets](js: JsonNode; after=""): Result[T] =
result = Result[T](beginning: after.len == 0)
let instructions = js{"data", "search_by_raw_query", "search_timeline", "timeline", "instructions"}
if instructions.len == 0:
@@ -607,15 +455,21 @@ proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
for instruction in instructions:
let typ = instruction{"type"}.getStr
if typ == "TimelineAddEntries":
for e in instructions[0]{"entries"}:
for e in instruction{"entries"}:
let entryId = e{"entryId"}.getStr
if entryId.startsWith("tweet"):
with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
let tweet = parseGraphTweet(tweetResult)
if not tweet.available:
tweet.id = parseBiggestInt(entryId.getId())
result.content.add tweet
elif entryId.startsWith("cursor-bottom"):
when T is Tweets:
if entryId.startsWith("tweet"):
with tweetRes, e{"content", "itemContent", "tweet_results", "result"}:
let tweet = parseGraphTweet(tweetRes)
if not tweet.available:
tweet.id = parseBiggestInt(entryId.getId())
result.content.add tweet
elif T is User:
if entryId.startsWith("user"):
with userRes, e{"content", "itemContent"}:
result.content.add parseGraphUser(userRes)
if entryId.startsWith("cursor-bottom"):
result.bottom = e{"content", "value"}.getStr
elif typ == "TimelineReplaceEntry":
if instruction{"entry_id_to_replace"}.getStr.startsWith("cursor-bottom"):


@@ -36,7 +36,8 @@ template with*(ident, value, body): untyped =
template with*(ident; value: JsonNode; body): untyped =
if true:
let ident {.inject.} = value
if value.notNull: body
# value.notNull causes a compilation error for versions < 1.6.14
if notNull(value): body
template getCursor*(js: JsonNode): string =
js{"content", "operation", "cursor", "value"}.getStr


@@ -60,7 +60,7 @@ proc genQueryParam*(query: Query): string =
param &= "OR "
if query.fromUser.len > 0 and query.kind in {posts, media}:
param &= "filter:self_threads OR-filter:replies "
param &= "filter:self_threads OR -filter:replies "
if "nativeretweets" notin query.excludes:
param &= "include:nativeretweets "


@@ -85,7 +85,7 @@ proc cache*(data: List) {.async.} =
await setEx(data.listKey, listCacheTime, compress(toFlatty(data)))
proc cache*(data: PhotoRail; name: string) {.async.} =
await setEx("pr:" & toLower(name), baseCacheTime, compress(toFlatty(data)))
await setEx("pr:" & toLower(name), baseCacheTime * 2, compress(toFlatty(data)))
proc cache*(data: User) {.async.} =
if data.username.len == 0: return
@@ -147,15 +147,15 @@ proc getCachedUsername*(userId: string): Future[string] {.async.} =
if result.len > 0 and user.id.len > 0:
await all(cacheUserId(result, user.id), cache(user))
proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
if id == 0: return
let tweet = await get(id.tweetKey)
if tweet != redisNil:
tweet.deserialize(Tweet)
else:
result = await getGraphTweetResult($id)
if not result.isNil:
await cache(result)
# proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
# if id == 0: return
# let tweet = await get(id.tweetKey)
# if tweet != redisNil:
# tweet.deserialize(Tweet)
# else:
# result = await getGraphTweetResult($id)
# if not result.isNil:
# await cache(result)
proc getCachedPhotoRail*(name: string): Future[PhotoRail] {.async.} =
if name.len == 0: return


@@ -1,10 +1,13 @@
# SPDX-License-Identifier: AGPL-3.0-only
import jester
import router_utils
import ".."/[tokens, types]
import ".."/[auth, types]
proc createDebugRouter*(cfg: Config) =
router debug:
get "/.tokens":
get "/.health":
respJson getAccountPoolHealth()
get "/.accounts":
cond cfg.enableDebug
respJson getPoolJson()
respJson getAccountPoolDebug()
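
Per getAccountPoolHealth in src/auth.nim above, the new /.health endpoint returns JSON of roughly this shape (all values hypothetical):

{
"accounts": {"total": 50, "limited": 3, "oldest": "2023-06-01T10:20:30Z", "newest": "2023-10-05T08:09:10Z", "average": "2023-08-15T12:00:00Z"},
"requests": {"total": 1234, "apis": {"search": 700, "tweetDetail": 400, "userTweets": 134}}
}

/.accounts keeps the per-account detail from getAccountPoolDebug and, like the old /.tokens, stays behind enableDebug.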


@@ -37,6 +37,7 @@ proc proxyMedia*(req: jester.Request; url: string): Future[HttpCode] {.async.} =
try:
let res = await client.get(url)
if res.status != "200 OK":
echo "[media] Proxying failed, status: $1, url: $2" % [res.status, url]
return Http404
let hashed = $hash(url)
@@ -65,6 +66,7 @@ proc proxyMedia*(req: jester.Request; url: string): Future[HttpCode] {.async.} =
await request.client.send(data)
data.setLen 0
except HttpRequestError, ProtocolError, OSError:
echo "[media] Proxying exception, error: $1, url: $2" % [getCurrentExceptionMsg(), url]
result = Http404
finally:
client.close()


@@ -27,7 +27,7 @@ proc timelineRss*(req: Request; cfg: Config; query: Query): Future[Rss] {.async.
else:
var q = query
q.fromUser = names
profile.tweets = await getTweetSearch(q, after)
profile.tweets = await getGraphTweetSearch(q, after)
# this is kinda dumb
profile.user = User(
username: name,
@@ -76,7 +76,7 @@ proc createRssRouter*(cfg: Config) =
if rss.cursor.len > 0:
respRss(rss, "Search")
let tweets = await getTweetSearch(query, cursor)
let tweets = await getGraphTweetSearch(query, cursor)
rss.cursor = tweets.bottom
rss.feed = renderSearchRss(tweets.content, query.text, genQueryUrl(query), cfg)


@@ -29,13 +29,13 @@ proc createSearchRouter*(cfg: Config) =
redirect("/" & q)
var users: Result[User]
try:
users = await getUserSearch(query, getCursor())
users = await getGraphUserSearch(query, getCursor())
except InternalError:
users = Result[User](beginning: true, query: query)
resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs, title)
of tweets:
let
tweets = await getTweetSearch(query, getCursor())
tweets = await getGraphTweetSearch(query, getCursor())
rss = "/search/rss?" & genQueryUrl(query)
resp renderMain(renderTweetSearch(tweets, prefs, getPath()),
request, cfg, prefs, title, rss=rss)


@@ -53,10 +53,10 @@ proc fetchProfile*(after: string; query: Query; skipRail=false;
result =
case query.kind
of posts: await getUserTimeline(userId, after)
of posts: await getGraphUserTweets(userId, TimelineKind.tweets, after)
of replies: await getGraphUserTweets(userId, TimelineKind.replies, after)
of media: await getGraphUserTweets(userId, TimelineKind.media, after)
else: Profile(tweets: await getTweetSearch(query, after))
else: Profile(tweets: await getGraphTweetSearch(query, after))
result.user = await user
result.photoRail = await rail
@@ -67,7 +67,7 @@ proc showTimeline*(request: Request; query: Query; cfg: Config; prefs: Prefs;
rss, after: string): Future[string] {.async.} =
if query.fromUser.len != 1:
let
timeline = await getTweetSearch(query, after)
timeline = await getGraphTweetSearch(query, after)
html = renderTweetSearch(timeline, prefs, getPath())
return renderMain(html, request, cfg, prefs, "Multi", rss=rss)
@@ -122,7 +122,7 @@ proc createTimelineRouter*(cfg: Config) =
# used for the infinite scroll feature
if @"scroll".len > 0:
if query.fromUser.len != 1:
var timeline = await getTweetSearch(query, after)
var timeline = await getGraphTweetSearch(query, after)
if timeline.content.len == 0: resp Http404
timeline.beginning = true
resp $renderTweetSearch(timeline, prefs, getPath())


@@ -70,8 +70,8 @@ nav {
.lp {
height: 14px;
margin-top: 2px;
display: block;
display: inline-block;
position: relative;
top: 2px;
fill: var(--fg_nav);
&:hover {


@@ -115,7 +115,7 @@
}
.profile-card-tabs-name {
@include breakable;
flex-shrink: 100;
}
.profile-card-avatar {


@@ -1,166 +0,0 @@
# SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, httpclient, times, sequtils, json, random
import strutils, tables
import types, consts
const
maxConcurrentReqs = 5 # max requests at a time per token, to avoid race conditions
maxLastUse = 1.hours # if a token is unused for 60 minutes, it expires
maxAge = 2.hours + 55.minutes # tokens expire after 3 hours
failDelay = initDuration(minutes=30)
var
tokenPool: seq[Token]
lastFailed: Time
enableLogging = false
let headers = newHttpHeaders({"authorization": auth})
template log(str) =
if enableLogging: echo "[tokens] ", str
proc getPoolJson*(): JsonNode =
var
list = newJObject()
totalReqs = 0
totalPending = 0
reqsPerApi: Table[string, int]
for token in tokenPool:
totalPending.inc(token.pending)
list[token.tok] = %*{
"apis": newJObject(),
"pending": token.pending,
"init": $token.init,
"lastUse": $token.lastUse
}
for api in token.apis.keys:
list[token.tok]["apis"][$api] = %token.apis[api]
let
maxReqs =
case api
of Api.search: 100000
of Api.photoRail: 180
of Api.timeline: 187
of Api.userTweets, Api.userTimeline: 300
of Api.userTweetsAndReplies, Api.userRestId,
Api.userScreenName, Api.tweetDetail, Api.tweetResult,
Api.list, Api.listTweets, Api.listMembers, Api.listBySlug, Api.userMedia: 500
of Api.userSearch: 900
reqs = maxReqs - token.apis[api].remaining
reqsPerApi[$api] = reqsPerApi.getOrDefault($api, 0) + reqs
totalReqs.inc(reqs)
return %*{
"amount": tokenPool.len,
"requests": totalReqs,
"pending": totalPending,
"apis": reqsPerApi,
"tokens": list
}
proc rateLimitError*(): ref RateLimitError =
newException(RateLimitError, "rate limited")
proc fetchToken(): Future[Token] {.async.} =
if getTime() - lastFailed < failDelay:
raise rateLimitError()
let client = newAsyncHttpClient(headers=headers)
try:
let
resp = await client.postContent(activate)
tokNode = parseJson(resp)["guest_token"]
tok = tokNode.getStr($(tokNode.getInt))
time = getTime()
return Token(tok: tok, init: time, lastUse: time)
except Exception as e:
echo "[tokens] fetching token failed: ", e.msg
if "Try again" notin e.msg:
echo "[tokens] fetching tokens paused, resuming in 30 minutes"
lastFailed = getTime()
finally:
client.close()
proc expired(token: Token): bool =
let time = getTime()
token.init < time - maxAge or token.lastUse < time - maxLastUse
proc isLimited(token: Token; api: Api): bool =
if token.isNil or token.expired:
return true
if api in token.apis:
let limit = token.apis[api]
return (limit.remaining <= 10 and limit.reset > epochTime().int)
else:
return false
proc isReady(token: Token; api: Api): bool =
not (token.isNil or token.pending > maxConcurrentReqs or token.isLimited(api))
proc release*(token: Token; used=false; invalid=false) =
if token.isNil: return
if invalid or token.expired:
if invalid: log "discarding invalid token"
elif token.expired: log "discarding expired token"
let idx = tokenPool.find(token)
if idx > -1: tokenPool.delete(idx)
elif used:
dec token.pending
token.lastUse = getTime()
proc getToken*(api: Api): Future[Token] {.async.} =
for i in 0 ..< tokenPool.len:
if result.isReady(api): break
release(result)
result = tokenPool.sample()
if not result.isReady(api):
release(result)
result = await fetchToken()
log "added new token to pool"
tokenPool.add result
if not result.isNil:
inc result.pending
else:
raise rateLimitError()
proc setRateLimit*(token: Token; api: Api; remaining, reset: int) =
# avoid undefined behavior in race conditions
if api in token.apis:
let limit = token.apis[api]
if limit.reset >= reset and limit.remaining < remaining:
return
token.apis[api] = RateLimit(remaining: remaining, reset: reset)
proc poolTokens*(amount: int) {.async.} =
var futs: seq[Future[Token]]
for i in 0 ..< amount:
futs.add fetchToken()
for token in futs:
var newToken: Token
try: newToken = await token
except: discard
if not newToken.isNil:
log "added new token to pool"
tokenPool.add newToken
proc initTokenPool*(cfg: Config) {.async.} =
enableLogging = cfg.enableDebug
while true:
if tokenPool.countIt(not it.isLimited(Api.userTimeline)) < cfg.minTokens:
await poolTokens(min(4, cfg.minTokens - tokenPool.len))
await sleepAsync(2000)


@@ -17,11 +17,8 @@ type
Api* {.pure.} = enum
tweetDetail
tweetResult
timeline
userTimeline
photoRail
search
userSearch
list
listBySlug
listMembers
@@ -35,11 +32,13 @@ type
RateLimit* = object
remaining*: int
reset*: int
limited*: bool
limitedAt*: int
Token* = ref object
tok*: string
init*: Time
lastUse*: Time
GuestAccount* = ref object
id*: int64
oauthToken*: string
oauthSecret*: string
pending*: int
apis*: Table[Api, RateLimit]
@@ -54,7 +53,7 @@ type
userNotFound = 50
suspended = 63
rateLimited = 88
invalidToken = 89
expiredToken = 89
listIdOrSlug = 112
tweetNotFound = 144
tweetNotAuthorized = 179
@@ -165,7 +164,7 @@ type
newsletterPublication = "newsletter_publication"
hidden
unknown
Card* = object
kind*: CardKind
url*: string


@@ -13,11 +13,6 @@ card = [
'Basic OBS Studio plugin, written in nim, supporting C++ (C fine too) - obsplugin.nim',
'gist.github.com', True],
['FluentAI/status/1116417904831029248',
'Amazon’s Alexa isn’t just AI — thousands of humans are listening',
'One of the only ways to improve Alexa is to have human beings check it for errors',
'theverge.com', True],
['nim_lang/status/1082989146040340480',
'Nim in 2018: A short recap',
'There were several big news in the Nim world in 2018 two new major releases, partnership with Status, and much more. But let us go chronologically.',
@@ -25,6 +20,11 @@ card = [
]
no_thumb = [
['FluentAI/status/1116417904831029248',
'LinkedIn',
'This link will take you to a page that’s not on LinkedIn',
'lnkd.in'],
['Thom_Wolf/status/1122466524860702729',
'facebookresearch/fairseq',
'Facebook AI Research Sequence-to-Sequence Toolkit written in Python. - GitHub - facebookresearch/fairseq: Facebook AI Research Sequence-to-Sequence Toolkit written in Python.',


@@ -6,7 +6,7 @@ normal = [['jack'], ['elonmusk']]
after = [['jack', '1681686036294803456'],
['elonmusk', '1681686036294803456']]
no_more = [['mobile_test_8?cursor=1000']]
no_more = [['mobile_test_8?cursor=DAABCgABF4YVAqN___kKAAICNn_4msIQAAgAAwAAAAIAAA']]
empty = [['emptyuser'], ['mobile_test_10']]


@@ -14,7 +14,7 @@ poll = [
image = [
['mobile_test/status/519364660823207936', 'BzUnaDFCUAAmrjs'],
['mobile_test_2/status/324619691039543297', 'BIFH45vCUAAQecj']
#['mobile_test_2/status/324619691039543297', 'BIFH45vCUAAQecj']
]
gif = [


@@ -3,14 +3,14 @@ set -e
TODAY=$(date +%Y-%m-%d)
git checkout master
git checkout guest_accounts
git fetch github
git merge github/master -m "Merge github/master"
git merge github/guest_accounts -m "Merge github/master"
# Build image
docker-compose up --build
docker-compose push
#docker-compose push
docker image tag git.nolog.cz/nolog.cz/nitter:latest git.nolog.cz/nolog.cz/nitter:$TODAY
docker push git.nolog.cz/nolog.cz/nitter:$TODAY
#docker image tag git.nolog.cz/nolog.cz/nitter:latest git.nolog.cz/nolog.cz/nitter:$TODAY
#docker push git.nolog.cz/nolog.cz/nitter:$TODAY