Merge remote-tracking branch 'github/master'
commit 0c9677ea41
28 changed files with 343 additions and 174 deletions
.github/workflows/build-docker.yml (vendored, 44 changes)
@@ -1,4 +1,4 @@
-name: CI/CD
+name: Docker
 
 on:
   push:
@@ -8,31 +8,51 @@ on:
       - master
 
 jobs:
-  build-docker:
-    runs-on: ubuntu-latest
+  build-docker-amd64:
+    runs-on: buildjet-2vcpu-ubuntu-2204
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
         with:
           fetch-depth: 0
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
-        with:
-          platforms: all
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@v1
+        uses: docker/setup-buildx-action@v2
        with:
          version: latest
       - name: Login to DockerHub
-        uses: docker/login-action@v1
+        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
-      - name: Build and push
-        uses: docker/build-push-action@v2
+      - name: Build and push AMD64 Docker image
+        uses: docker/build-push-action@v3
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64
          push: true
          tags: zedeus/nitter:latest,zedeus/nitter:${{ github.sha }}
+  build-docker-arm64:
+    runs-on: buildjet-2vcpu-ubuntu-2204-arm
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v2
+        with:
+          version: latest
+      - name: Login to DockerHub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}
+      - name: Build and push ARM64 Docker image
+        uses: docker/build-push-action@v3
+        with:
+          context: .
+          file: ./Dockerfile.arm64
+          platforms: linux/arm64
+          push: true
+          tags: zedeus/nitter:latest-arm64,zedeus/nitter:${{ github.sha }}-arm64
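The workflow above now pushes two architecture-specific tags, `zedeus/nitter:latest` (AMD64) and `zedeus/nitter:latest-arm64` (ARM64), instead of one multi-arch image. As a rough sketch only, not part of this commit, the two tags could later be stitched into a single multi-arch tag; the `latest-multiarch` name below is purely hypothetical:

```bash
# Hypothetical follow-up step, not part of the workflow: combine the two pushed
# tags into one multi-arch manifest. The target tag name is an assumption.
docker buildx imagetools create \
  -t zedeus/nitter:latest-multiarch \
  zedeus/nitter:latest \
  zedeus/nitter:latest-arm64
```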
.github/workflows/run-tests.yml (vendored, 42 changes, new file)
@@ -0,0 +1,42 @@
+name: Run tests
+
+on:
+  push:
+    paths-ignore:
+      - "*.md"
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+      - name: Cache nimble
+        id: cache-nimble
+        uses: actions/cache@v3
+        with:
+          path: ~/.nimble
+          key: nimble-${{ hashFiles('*.nimble') }}
+          restore-keys: "nimble-"
+      - uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+          cache: "pip"
+      - uses: jiro4989/setup-nim-action@v1
+        with:
+          nim-version: "1.x"
+      - run: nimble build -d:release -Y
+      - run: pip install seleniumbase
+      - run: seleniumbase install chromedriver
+      - uses: supercharge/redis-github-action@1.5.0
+      - name: Prepare Nitter
+        run: |
+          sudo apt install libsass-dev -y
+          cp nitter.example.conf nitter.conf
+          nimble md
+          nimble scss
+      - name: Run tests
+        run: |
+          ./nitter &
+          pytest -n4 tests
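For reference, the new test job can be approximated locally with the same commands it invokes; this sketch assumes Nim/nimble, Python, and a running Redis server are already available (CI gets Redis from `supercharge/redis-github-action`):

```bash
# Local approximation of the CI job above; commands are taken directly from the workflow.
sudo apt install libsass-dev -y        # sass headers needed by nimble scss
nimble build -d:release -Y             # build the nitter binary
nimble md && nimble scss               # render markdown pages and stylesheets
cp nitter.example.conf nitter.conf     # default config used by the tests
pip install seleniumbase               # browser test dependency
seleniumbase install chromedriver
./nitter &                             # start the instance in the background
pytest -n4 tests                       # run the tests with 4 workers
```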
Dockerfile
@@ -20,4 +20,6 @@ COPY --from=nim /src/nitter/nitter ./
 COPY --from=nim /src/nitter/nitter.example.conf ./nitter.conf
 COPY --from=nim /src/nitter/public ./public
 EXPOSE 8080
+RUN adduser -h /src/ -D -s /bin/sh nitter
+USER nitter
 CMD ./nitter
Dockerfile.arm64 (23 changes, new file)
@@ -0,0 +1,23 @@
+FROM alpine:3.17 as nim
+LABEL maintainer="setenforce@protonmail.com"
+
+RUN apk --no-cache add gcc git libc-dev libsass-dev "nim=1.6.8-r0" nimble pcre
+
+WORKDIR /src/nitter
+
+COPY nitter.nimble .
+RUN nimble install -y --depsOnly
+
+COPY . .
+RUN nimble build -d:danger -d:lto -d:strip \
+    && nimble scss \
+    && nimble md
+
+FROM alpine:3.17
+WORKDIR /src/
+RUN apk --no-cache add ca-certificates pcre openssl1.1-compat
+COPY --from=nim /src/nitter/nitter ./
+COPY --from=nim /src/nitter/nitter.example.conf ./nitter.conf
+COPY --from=nim /src/nitter/public ./public
+EXPOSE 8080
+CMD ./nitter
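Building this image locally mirrors the `docker build` example in the README, just pointing at the new file; the tag name below is only an assumption:

```bash
# Hypothetical local build on an ARM64 host using the new Dockerfile.arm64.
docker build -f Dockerfile.arm64 -t nitter:latest-arm64 .
```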
README.md
@@ -109,7 +109,9 @@ performance reasons.
 
 ### Docker
 
-#### NOTE: For ARM64/ARM support, please use [unixfox's image](https://quay.io/repository/unixfox/nitter?tab=tags), more info [here](https://github.com/zedeus/nitter/issues/399#issuecomment-997263495)
 Page for the Docker image: https://hub.docker.com/r/zedeus/nitter
+
+#### NOTE: For ARM64 support, please use the separate ARM64 docker image: [`zedeus/nitter:latest-arm64`](https://hub.docker.com/r/zedeus/nitter/tags).
+
 To run Nitter with Docker, you'll need to install and run Redis separately
 before you can run the container. See below for how to also run Redis using
@@ -122,6 +124,8 @@ docker build -t nitter:latest .
 docker run -v $(pwd)/nitter.conf:/src/nitter.conf -d --network host nitter:latest
 ```
 
+Note: For ARM64, use this Dockerfile: [`Dockerfile.arm64`](https://github.com/zedeus/nitter/blob/master/Dockerfile.arm64).
+
 A prebuilt Docker image is provided as well:
 
 ```bash
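A sketch of running the prebuilt ARM64 image referenced in the README hunk above, reusing the flags from its `docker run` example (assumes a `nitter.conf` in the current directory):

```bash
# Pull and run the ARM64 tag pushed by the build-docker workflow.
docker pull zedeus/nitter:latest-arm64
docker run -v $(pwd)/nitter.conf:/src/nitter.conf -d --network host zedeus/nitter:latest-arm64
```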
docker-compose.yml
@@ -9,7 +9,7 @@ services:
     ports:
       - "127.0.0.1:8080:8080" # Replace with "8080:8080" if you don't use a reverse proxy
     volumes:
-      - ./nitter.conf:/src/nitter.conf:ro
+      - ./nitter.conf:/src/nitter.conf:Z,ro
     depends_on:
       - nitter-redis
     restart: unless-stopped
@@ -18,6 +18,12 @@ services:
       interval: 30s
       timeout: 5s
       retries: 2
+    user: "998:998"
+    read_only: true
+    security_opt:
+      - no-new-privileges:true
+    cap_drop:
+      - ALL
 
   nitter-redis:
     image: redis:6-alpine
@@ -31,6 +37,12 @@ services:
       interval: 30s
       timeout: 5s
       retries: 2
+    user: "999:1000"
+    read_only: true
+    security_opt:
+      - no-new-privileges:true
+    cap_drop:
+      - ALL
 
 volumes:
   nitter-redis:
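The new `user`, `read_only`, `security_opt`, and `cap_drop` keys harden both containers. A quick, hypothetical way to confirm they took effect on a running stack (container IDs are resolved via compose, so no names are assumed):

```bash
# Hypothetical verification, not part of the compose file: inspect the running containers.
docker compose up -d
docker inspect \
  --format '{{.Name}}: user={{.Config.User}} read_only={{.HostConfig.ReadonlyRootfs}} cap_drop={{.HostConfig.CapDrop}}' \
  $(docker compose ps -q)
```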
nitter.example.conf
@@ -39,7 +39,6 @@ theme = "Nitter"
 replaceTwitter = "nitter.net"
 replaceYouTube = "piped.video"
 replaceReddit = "teddit.net"
-replaceInstagram = ""
 proxyVideos = true
 hlsPlayback = false
 infiniteScroll = false
nitter.nimble
@@ -12,17 +12,17 @@ bin = @["nitter"]
 
 requires "nim >= 1.4.8"
 requires "jester#baca3f"
-requires "karax#6abcb77"
-requires "sass#e683aa1"
-requires "nimcrypto#b41129f"
-requires "markdown#a661c26"
+requires "karax#9ee695b"
+requires "sass#7dfdd03"
+requires "nimcrypto#4014ef9"
+requires "markdown#158efe3"
 requires "packedjson#9e6fbb6"
 requires "supersnappy#6c94198"
 requires "redpool#8b7c1db"
 requires "https://github.com/zedeus/redis#d0a0e6f"
-requires "zippy#61922b9"
-requires "flatty#9f885d7"
-requires "jsony#d0e69bd"
+requires "zippy#ca5989a"
+requires "flatty#e668085"
+requires "jsony#ea811be"
 
 
 # Tasks
public/js/hls.light.min.js (vendored, 4 changes)
File diff suppressed because one or more lines are too long
robots.txt
@@ -1,4 +1,5 @@
 User-agent: *
+Disallow: /
 Crawl-delay: 1
 User-agent: Twitterbot
 Disallow:
src/api.nim (46 changes)
@@ -4,11 +4,22 @@ import packedjson
 import types, query, formatters, consts, apiutils, parser
 import experimental/parser as newParser
 
-proc getGraphUser*(id: string): Future[User] {.async.} =
+proc getGraphUser*(username: string): Future[User] {.async.} =
+  if username.len == 0: return
+  let
+    variables = """{
+      "screen_name": "$1",
+      "withSafetyModeUserFields": false,
+      "withSuperFollowsUserFields": false
+    }""" % [username]
+    js = await fetchRaw(graphUser ? {"variables": variables}, Api.userScreenName)
+  result = parseGraphUser(js)
+
+proc getGraphUserById*(id: string): Future[User] {.async.} =
   if id.len == 0 or id.any(c => not c.isDigit): return
   let
-    variables = %*{"userId": id, "withSuperFollowsUserFields": true}
-    js = await fetchRaw(graphUser ? {"variables": $variables}, Api.userRestId)
+    variables = """{"userId": "$1", "withSuperFollowsUserFields": true}""" % [id]
+    js = await fetchRaw(graphUserById ? {"variables": variables}, Api.userRestId)
   result = parseGraphUser(js)
 
 proc getGraphListBySlug*(name, list: string): Future[List] {.async.} =
@@ -47,20 +58,6 @@ proc getListTimeline*(id: string; after=""): Future[Timeline] {.async.} =
     url = listTimeline ? ps
   result = parseTimeline(await fetch(url, Api.timeline), after)
 
-proc getUser*(username: string): Future[User] {.async.} =
-  if username.len == 0: return
-  let
-    ps = genParams({"screen_name": username})
-    json = await fetchRaw(userShow ? ps, Api.userShow)
-  result = parseUser(json, username)
-
-proc getUserById*(userId: string): Future[User] {.async.} =
-  if userId.len == 0: return
-  let
-    ps = genParams({"user_id": userId})
-    json = await fetchRaw(userShow ? ps, Api.userShow)
-  result = parseUser(json)
-
 proc getTimeline*(id: string; after=""; replies=false): Future[Timeline] {.async.} =
   if id.len == 0: return
   let
@@ -104,16 +101,21 @@ proc getSearch*[T](query: Query; after=""): Future[Result[T]] {.async.} =
   except InternalError:
     return Result[T](beginning: true, query: query)
 
-proc getTweetImpl(id: string; after=""): Future[Conversation] {.async.} =
-  let url = tweet / (id & ".json") ? genParams(cursor=after)
-  result = parseConversation(await fetch(url, Api.tweet), id)
+proc getGraphTweet(id: string; after=""): Future[Conversation] {.async.} =
+  if id.len == 0: return
+  let
+    cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
+    variables = tweetVariables % [id, cursor]
+    params = {"variables": variables, "features": tweetFeatures}
+    js = await fetch(graphTweet ? params, Api.tweetDetail)
+  result = parseGraphConversation(js, id)
 
 proc getReplies*(id, after: string): Future[Result[Chain]] {.async.} =
-  result = (await getTweetImpl(id, after)).replies
+  result = (await getGraphTweet(id, after)).replies
   result.beginning = after.len == 0
 
 proc getTweet*(id: string; after=""): Future[Conversation] {.async.} =
-  result = await getTweetImpl(id)
+  result = await getGraphTweet(id)
   if after.len > 0:
     result.replies = await getReplies(id, after)
 
src/apiutils.nim
@@ -23,7 +23,7 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
     result &= ("count", count)
   if cursor.len > 0:
     # The raw cursor often has plus signs, which sometimes get turned into spaces,
-    # so we need to them back into a plus
+    # so we need to turn them back into a plus
     if " " in cursor:
       result &= ("cursor", cursor.replace(" ", "+"))
     else:
@@ -69,9 +69,7 @@ template fetchImpl(result, fetchBody) {.dirty.} =
 
       # Twitter randomly returns 401 errors with an empty body quite often.
      # Retrying the request usually works.
-      var attempt = 0
-      while resp.status == "401 Unauthorized" and result.len == 0 and attempt < 3:
-        inc attempt
+      if resp.status == "401 Unauthorized" and result.len == 0:
        getContent()
 
    if resp.status == $Http503:
@@ -110,7 +108,7 @@ proc fetch*(url: Uri; api: Api): Future[JsonNode] {.async.} =
      updateToken()
 
    let error = result.getError
-    if error in {invalidToken, forbidden, badToken}:
+    if error in {invalidToken, badToken}:
      echo "fetch error: ", result.getError
      release(token, invalid=true)
      raise rateLimitError()
@@ -125,7 +123,7 @@ proc fetchRaw*(url: Uri; api: Api): Future[string] {.async.} =
 
  if result.startsWith("{\"errors"):
    let errors = result.fromJson(Errors)
-    if errors in {invalidToken, forbidden, badToken}:
+    if errors in {invalidToken, badToken}:
      echo "fetch error: ", errors
      release(token, invalid=true)
      raise rateLimitError()
src/consts.nim
@@ -19,7 +19,9 @@ const
   tweet* = timelineApi / "conversation"
 
   graphql = api / "graphql"
-  graphUser* = graphql / "I5nvpI91ljifos1Y3Lltyg/UserByRestId"
+  graphTweet* = graphql / "6lWNh96EXDJCXl05SAtn_g/TweetDetail"
+  graphUser* = graphql / "7mjxD3-C6BxitPMVQ6w0-Q/UserByScreenName"
+  graphUserById* = graphql / "I5nvpI91ljifos1Y3Lltyg/UserByRestId"
   graphList* = graphql / "JADTh6cjebfgetzvF3tQvQ/List"
   graphListBySlug* = graphql / "ErWsz9cObLel1BF-HjuBlA/ListBySlug"
   graphListMembers* = graphql / "Ke6urWMeCV2UlKXGRy4sow/ListMembers"
@@ -57,3 +59,34 @@ const
   ## user: "result_filter: user"
   ## photos: "result_filter: photos"
   ## videos: "result_filter: videos"
+
+  tweetVariables* = """{
+    "focalTweetId": "$1",
+    $2
+    "includePromotedContent": false,
+    "withBirdwatchNotes": false,
+    "withDownvotePerspective": false,
+    "withReactionsMetadata": false,
+    "withReactionsPerspective": false,
+    "withSuperFollowsTweetFields": false,
+    "withSuperFollowsUserFields": false,
+    "withVoice": false,
+    "withV2Timeline": true
+  }"""
+
+  tweetFeatures* = """{
+    "graphql_is_translatable_rweb_tweet_is_translatable_enabled": false,
+    "responsive_web_graphql_timeline_navigation_enabled": false,
+    "standardized_nudges_misinfo": false,
+    "verified_phone_label_enabled": false,
+    "responsive_web_twitter_blue_verified_badge_is_enabled": false,
+    "tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled": false,
+    "view_counts_everywhere_api_enabled": false,
+    "responsive_web_edit_tweet_api_enabled": false,
+    "tweetypie_unmention_optimization_enabled": false,
+    "vibe_api_enabled": false,
+    "longform_notetweets_consumption_enabled": true,
+    "responsive_web_text_conversations_enabled": false,
+    "responsive_web_enhance_cards_enabled": false,
+    "interactive_text_enabled": false
+  }"""
src/experimental/parser/graphuser.nim
@@ -1,9 +1,14 @@
+import options
 import jsony
 import user, ../types/[graphuser, graphlistmembers]
 from ../../types import User, Result, Query, QueryKind
 
 proc parseGraphUser*(json: string): User =
   let raw = json.fromJson(GraphUser)
+
+  if raw.data.user.result.reason.get("") == "Suspended":
+    return User(suspended: true)
+
   result = toUser raw.data.user.result.legacy
   result.id = raw.data.user.result.restId
 
src/experimental/types/graphuser.nim
@@ -1,3 +1,4 @@
+import options
 import user
 
 type
@@ -10,3 +11,4 @@ type
   UserResult = object
     legacy*: RawUser
     restId*: string
+    reason*: Option[string]
src/formatters.nim
@@ -13,7 +13,6 @@ let
   twLinkRegex = re"""<a href="https:\/\/twitter.com([^"]+)">twitter\.com(\S+)</a>"""
 
   ytRegex = re(r"([A-z.]+\.)?youtu(be\.com|\.be)", {reStudy, reIgnoreCase})
-  igRegex = re"(www\.)?instagram\.com"
 
   rdRegex = re"(?<![.b])((www|np|new|amp|old)\.)?reddit.com"
   rdShortRegex = re"(?<![.b])redd\.it\/"
@@ -70,9 +69,6 @@ proc replaceUrls*(body: string; prefs: Prefs; absolute=""): string =
   if prefs.replaceReddit in result and "/gallery/" in result:
     result = result.replace("/gallery/", "/comments/")
 
-  if prefs.replaceInstagram.len > 0 and "instagram.com" in result:
-    result = result.replace(igRegex, prefs.replaceInstagram)
-
   if absolute.len > 0 and "href" in result:
     result = result.replace("href=\"/", &"href=\"{absolute}/")
 
src/nitter.nim
@@ -56,6 +56,7 @@ settings:
   port = Port(cfg.port)
   staticDir = cfg.staticDir
   bindAddr = cfg.address
+  reusePort = true
 
 routes:
   get "/":
src/parser.nim (133 changes)
@@ -72,8 +72,8 @@ proc parseGif(js: JsonNode): Gif =
 proc parseVideo(js: JsonNode): Video =
   result = Video(
     thumb: js{"media_url_https"}.getImageStr,
-    views: js{"ext", "mediaStats", "r", "ok", "viewCount"}.getStr,
-    available: js{"ext_media_availability", "status"}.getStr == "available",
+    views: js{"ext", "mediaStats", "r", "ok", "viewCount"}.getStr($js{"mediaStats", "viewCount"}.getInt),
+    available: js{"ext_media_availability", "status"}.getStr.toLowerAscii == "available",
     title: js{"ext_alt_text"}.getStr,
     durationMs: js{"video_info", "duration_millis"}.getInt
     # playbackType: mp4
@@ -185,7 +185,7 @@ proc parseCard(js: JsonNode; urls: JsonNode): Card =
      result.url.len == 0 or result.url.startsWith("card://"):
     result.url = getPicUrl(result.image)
 
-proc parseTweet(js: JsonNode): Tweet =
+proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
   if js.isNull: return
   result = Tweet(
     id: js{"id_str"}.getId,
@@ -193,7 +193,6 @@ proc parseTweet(js: JsonNode): Tweet =
     replyId: js{"in_reply_to_status_id_str"}.getId,
     text: js{"full_text"}.getStr,
     time: js{"created_at"}.getTime,
-    source: getSource(js),
     hasThread: js{"self_thread"}.notNull,
     available: true,
     user: User(id: js{"user_id_str"}.getStr),
@@ -205,12 +204,12 @@ proc parseTweet(js: JsonNode): Tweet =
     )
   )
 
-  result.expandTweetEntities(js)
-
   # fix for pinned threads
   if result.hasThread and result.threadId == 0:
     result.threadId = js{"self_thread", "id_str"}.getId
 
+  result.expandTweetEntities(js)
+
   if js{"is_quote_status"}.getBool:
     result.quote = some Tweet(id: js{"quoted_status_id_str"}.getId)
 
@@ -218,7 +217,7 @@ proc parseTweet(js: JsonNode): Tweet =
     result.retweet = some Tweet(id: rt.getId)
     return
 
-  with jsCard, js{"card"}:
+  if jsCard.kind != JNull:
     let name = jsCard{"name"}.getStr
     if "poll" in name:
       if "image" in name:
@@ -295,64 +294,18 @@ proc parseGlobalObjects(js: JsonNode): GlobalObjects =
     result.users[k] = parseUser(v, k)
 
   for k, v in tweets:
-    var tweet = parseTweet(v)
+    var tweet = parseTweet(v, v{"card"})
     if tweet.user.id in result.users:
       tweet.user = result.users[tweet.user.id]
     result.tweets[k] = tweet
 
-proc parseThread(js: JsonNode; global: GlobalObjects): tuple[thread: Chain, self: bool] =
-  result.thread = Chain()
-
-  let thread = js{"content", "item", "content", "conversationThread"}
-  with cursor, thread{"showMoreCursor"}:
-    result.thread.cursor = cursor{"value"}.getStr
-    result.thread.hasMore = true
-
-  for t in thread{"conversationComponents"}:
-    let content = t{"conversationTweetComponent", "tweet"}
-
-    if content{"displayType"}.getStr == "SelfThread":
-      result.self = true
-
-    var tweet = finalizeTweet(global, content{"id"}.getStr)
-    if not tweet.available:
-      tweet.tombstone = getTombstone(content{"tombstone"})
-    result.thread.content.add tweet
-
-proc parseConversation*(js: JsonNode; tweetId: string): Conversation =
-  result = Conversation(replies: Result[Chain](beginning: true))
-  let global = parseGlobalObjects(? js)
-
-  let instructions = ? js{"timeline", "instructions"}
-  if instructions.len == 0:
-    return
-
-  for e in instructions[0]{"addEntries", "entries"}:
-    let entry = e{"entryId"}.getStr
-    if "tweet" in entry or "tombstone" in entry:
-      let tweet = finalizeTweet(global, e.getEntryId)
-      if $tweet.id != tweetId:
-        result.before.content.add tweet
-      else:
-        result.tweet = tweet
-    elif "conversationThread" in entry:
-      let (thread, self) = parseThread(e, global)
-      if thread.content.len > 0:
-        if self:
-          result.after = thread
-        else:
-          result.replies.content.add thread
-    elif "cursor-showMore" in entry:
-      result.replies.bottom = e.getCursor
-    elif "cursor-bottom" in entry:
-      result.replies.bottom = e.getCursor
-
 proc parseStatus*(js: JsonNode): Tweet =
   with e, js{"errors"}:
-    if e.getError == tweetNotFound:
+    if e.getError in {tweetNotFound, tweetUnavailable, tweetCensored, doesntExist,
+                      tweetNotAuthorized, suspended}:
       return
 
-  result = parseTweet(js)
+  result = parseTweet(js, js{"card"})
   if not result.isNil:
     result.user = parseUser(js{"user"})
 
@@ -409,7 +362,7 @@ proc parseTimeline*(js: JsonNode; after=""): Timeline =
 proc parsePhotoRail*(js: JsonNode): PhotoRail =
   for tweet in js:
     let
-      t = parseTweet(tweet)
+      t = parseTweet(tweet, js{"card"})
       url = if t.photos.len > 0: t.photos[0]
             elif t.video.isSome: get(t.video).thumb
            elif t.gif.isSome: get(t.gif).thumb
@@ -418,3 +371,67 @@ proc parsePhotoRail*(js: JsonNode): PhotoRail =
 
     if url.len == 0: continue
     result.add GalleryPhoto(url: url, tweetId: $t.id)
+
+proc parseGraphTweet(js: JsonNode): Tweet =
+  if js.kind == JNull or js{"__typename"}.getStr == "TweetUnavailable":
+    return Tweet(available: false)
+
+  var jsCard = copy(js{"card", "legacy"})
+  if jsCard.kind != JNull:
+    var values = newJObject()
+    for val in jsCard["binding_values"]:
+      values[val["key"].getStr] = val["value"]
+    jsCard["binding_values"] = values
+
+  result = parseTweet(js{"legacy"}, jsCard)
+  result.user = parseUser(js{"core", "user_results", "result", "legacy"})
+
+  with noteTweet, js{"note_tweet", "note_tweet_results", "result"}:
+    result.expandNoteTweetEntities(noteTweet)
+
+  if result.quote.isSome:
+    result.quote = some(parseGraphTweet(js{"quoted_status_result", "result"}))
+
+proc parseGraphThread(js: JsonNode): tuple[thread: Chain; self: bool] =
+  let thread = js{"content", "items"}
+  for t in js{"content", "items"}:
+    let entryId = t{"entryId"}.getStr
+    if "cursor-showmore" in entryId:
+      let cursor = t{"item", "itemContent", "value"}
+      result.thread.cursor = cursor.getStr
+      result.thread.hasMore = true
+    elif "tweet" in entryId:
+      let tweet = parseGraphTweet(t{"item", "itemContent", "tweet_results", "result"})
+      result.thread.content.add tweet
+
+      if t{"item", "itemContent", "tweetDisplayType"}.getStr == "SelfThread":
+        result.self = true
+
+proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
+  result = Conversation(replies: Result[Chain](beginning: true))
+
+  let instructions = ? js{"data", "threaded_conversation_with_injections_v2", "instructions"}
+  if instructions.len == 0:
+    return
+
+  for e in instructions[0]{"entries"}:
+    let entryId = e{"entryId"}.getStr
+    # echo entryId
+    if entryId.startsWith("tweet"):
+      let tweet = parseGraphTweet(e{"content", "itemContent", "tweet_results", "result"})
+
+      if not tweet.available:
+        tweet.id = parseBiggestInt(entryId.getId())
+
+      if $tweet.id == tweetId:
+        result.tweet = tweet
+      else:
+        result.before.content.add tweet
+    elif entryId.startsWith("conversationthread"):
+      let (thread, self) = parseGraphThread(e)
+      if self:
+        result.after = thread
+      else:
+        result.replies.content.add thread
+    elif entryId.startsWith("cursor-bottom"):
+      result.replies.bottom = e{"content", "itemContent", "value"}.getStr
src/parserutils.nim
@@ -133,10 +133,6 @@ proc getTombstone*(js: JsonNode): string =
   result = js{"tombstoneInfo", "richText", "text"}.getStr
   result.removeSuffix(" Learn more")
 
-proc getSource*(js: JsonNode): string =
-  let src = js{"source"}.getStr
-  result = src.substr(src.find('>') + 1, src.rfind('<') - 1)
-
 proc getMp4Resolution*(url: string): int =
   # parses the height out of a URL like this one:
   # https://video.twimg.com/ext_tw_video/<tweet-id>/pu/vid/720x1280/<random>.mp4
@@ -234,47 +230,37 @@ proc expandUserEntities*(user: var User; js: JsonNode) =
   user.bio = user.bio.replacef(unRegex, unReplace)
                      .replacef(htRegex, htReplace)
 
-proc expandTweetEntities*(tweet: Tweet; js: JsonNode) =
-  let
-    orig = tweet.text.toRunes
-    textRange = js{"display_text_range"}
-    textSlice = textRange{0}.getInt .. textRange{1}.getInt
-    hasQuote = js{"is_quote_status"}.getBool
-    hasCard = tweet.card.isSome
-
-  var replyTo = ""
-  if tweet.replyId != 0:
-    with reply, js{"in_reply_to_screen_name"}:
-      tweet.reply.add reply.getStr
-      replyTo = reply.getStr
-
-  let ent = ? js{"entities"}
+proc expandTextEntities(tweet: Tweet; entities: JsonNode; text: string; textSlice: Slice[int];
+                        replyTo=""; hasQuote=false) =
+  let hasCard = tweet.card.isSome
 
   var replacements = newSeq[ReplaceSlice]()
 
-  with urls, ent{"urls"}:
+  with urls, entities{"urls"}:
     for u in urls:
       let urlStr = u["url"].getStr
-      if urlStr.len == 0 or urlStr notin tweet.text:
+      if urlStr.len == 0 or urlStr notin text:
        continue
 
      replacements.extractUrls(u, textSlice.b, hideTwitter = hasQuote)
 
      if hasCard and u{"url"}.getStr == get(tweet.card).url:
        get(tweet.card).url = u{"expanded_url"}.getStr
 
-  with media, ent{"media"}:
+  with media, entities{"media"}:
    for m in media:
      replacements.extractUrls(m, textSlice.b, hideTwitter = true)
 
-  if "hashtags" in ent:
-    for hashtag in ent["hashtags"]:
+  if "hashtags" in entities:
+    for hashtag in entities["hashtags"]:
      replacements.extractHashtags(hashtag)
 
-  if "symbols" in ent:
-    for symbol in ent["symbols"]:
+  if "symbols" in entities:
+    for symbol in entities["symbols"]:
      replacements.extractHashtags(symbol)
 
-  if "user_mentions" in ent:
-    for mention in ent["user_mentions"]:
+  if "user_mentions" in entities:
+    for mention in entities["user_mentions"]:
      let
        name = mention{"screen_name"}.getStr
        slice = mention.extractSlice
@@ -291,5 +277,27 @@ proc expandTweetEntities*(tweet: Tweet; js: JsonNode) =
   replacements.deduplicate
   replacements.sort(cmp)
 
-  tweet.text = orig.replacedWith(replacements, textSlice)
-                   .strip(leading=false)
+  tweet.text = text.toRunes.replacedWith(replacements, textSlice).strip(leading=false)
+
+proc expandTweetEntities*(tweet: Tweet; js: JsonNode) =
+  let
+    entities = ? js{"entities"}
+    hasQuote = js{"is_quote_status"}.getBool
+    textRange = js{"display_text_range"}
+    textSlice = textRange{0}.getInt .. textRange{1}.getInt
+
+  var replyTo = ""
+  if tweet.replyId != 0:
+    with reply, js{"in_reply_to_screen_name"}:
+      replyTo = reply.getStr
+      tweet.reply.add replyTo
+
+  tweet.expandTextEntities(entities, tweet.text, textSlice, replyTo, hasQuote)
+
+proc expandNoteTweetEntities*(tweet: Tweet; js: JsonNode) =
+  let
+    entities = ? js{"entity_set"}
+    text = js{"text"}.getStr
+    textSlice = 0..text.runeLen
+
+  tweet.expandTextEntities(entities, text, textSlice)
src/prefs_impl.nim
@@ -83,7 +83,7 @@ genPrefs:
       "Enable mp4 video playback (only for gifs)"
 
     hlsPlayback(checkbox, false):
-      "Enable hls video streaming (requires JavaScript)"
+      "Enable HLS video streaming (requires JavaScript)"
 
     proxyVideos(checkbox, true):
       "Proxy video streaming through the server (might be slow)"
@@ -107,10 +107,6 @@ genPrefs:
       "Reddit -> Teddit/Libreddit"
       placeholder: "Teddit hostname"
 
-    replaceInstagram(input, ""):
-      "Instagram -> Bibliogram"
-      placeholder: "Bibliogram hostname"
-
 iterator allPrefs*(): Pref =
   for k, v in prefList:
     for pref in v:
src/redis_cache.nim
@@ -118,11 +118,11 @@ proc getUserId*(username: string): Future[string] {.async.} =
   pool.withAcquire(r):
     result = await r.hGet(name.uidKey, name)
     if result == redisNil:
-      let user = await getUser(username)
+      let user = await getGraphUser(username)
       if user.suspended:
         return "suspended"
       else:
-        await cacheUserId(name, user.id)
+        await all(cacheUserId(name, user.id), cache(user))
         return user.id
 
 proc getCachedUser*(username: string; fetch=true): Future[User] {.async.} =
@@ -130,8 +130,7 @@ proc getCachedUser*(username: string; fetch=true): Future[User] {.async.} =
   if prof != redisNil:
     prof.deserialize(User)
   elif fetch:
-    let userId = await getUserId(username)
-    result = await getGraphUser(userId)
+    result = await getGraphUser(username)
     await cache(result)
 
 proc getCachedUsername*(userId: string): Future[string] {.async.} =
@@ -142,9 +141,11 @@ proc getCachedUsername*(userId: string): Future[string] {.async.} =
   if username != redisNil:
     result = username
   else:
-    let user = await getUserById(userId)
+    let user = await getGraphUserById(userId)
     result = user.username
     await setEx(key, baseCacheTime, result)
+    if result.len > 0 and user.id.len > 0:
+      await all(cacheUserId(result, user.id), cache(user))
 
 proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
   if id == 0: return
@@ -153,7 +154,7 @@ proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
     tweet.deserialize(Tweet)
   else:
     result = await getStatus($id)
-    if result.isNil:
+    if not result.isNil:
       await cache(result)
 
 proc getCachedPhotoRail*(name: string): Future[PhotoRail] {.async.} =
src/tokens.nim
@@ -41,7 +41,8 @@ proc getPoolJson*(): JsonNode =
       let
         maxReqs =
           case api
-          of Api.listMembers, Api.listBySlug, Api.list, Api.userRestId: 500
+          of Api.listMembers, Api.listBySlug, Api.list,
+             Api.userRestId, Api.userScreenName, Api.tweetDetail: 500
           of Api.timeline: 187
           else: 180
         reqs = maxReqs - token.apis[api].remaining
|
|||
InternalError* = object of CatchableError
|
||||
|
||||
Api* {.pure.} = enum
|
||||
tweetDetail
|
||||
userShow
|
||||
timeline
|
||||
search
|
||||
|
@ -17,6 +18,7 @@ type
|
|||
listBySlug
|
||||
listMembers
|
||||
userRestId
|
||||
userScreenName
|
||||
status
|
||||
|
||||
RateLimit* = object
|
||||
|
@ -42,9 +44,12 @@ type
|
|||
invalidToken = 89
|
||||
listIdOrSlug = 112
|
||||
tweetNotFound = 144
|
||||
tweetNotAuthorized = 179
|
||||
forbidden = 200
|
||||
badToken = 239
|
||||
noCsrf = 353
|
||||
tweetUnavailable = 421
|
||||
tweetCensored = 422
|
||||
|
||||
User* = object
|
||||
id*: string
|
||||
|
@ -175,6 +180,7 @@ type
|
|||
available*: bool
|
||||
tombstone*: string
|
||||
location*: string
|
||||
# Unused, needed for backwards compat
|
||||
source*: string
|
||||
stats*: TweetStats
|
||||
retweet*: Option[Tweet]
|
||||
|
|
|
src/views/general.nim
@@ -99,9 +99,8 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
       link(rel="preload", type="image/png", href=bannerUrl, `as`="image")
 
     for url in images:
-      let suffix = if "400x400" in url or url.endsWith("placeholder.png"): ""
-                   else: "?name=small"
-      let preloadUrl = getPicUrl(url & suffix)
+      let preloadUrl = if "400x400" in url: getPicUrl(url)
+                       else: getSmallPic(url)
       link(rel="preload", type="image/png", href=preloadUrl, `as`="image")
 
       let image = getUrlPrefix(cfg) & getPicUrl(url)
src/views/renderutils.nim
@@ -3,6 +3,14 @@ import strutils, strformat
 import karax/[karaxdsl, vdom, vstyles]
 import ".."/[types, utils]
 
+const smallWebp* = "?name=small&format=webp"
+
+proc getSmallPic*(url: string): string =
+  result = url
+  if "?" notin url and not url.endsWith("placeholder.png"):
+    result &= smallWebp
+  result = getPicUrl(result)
+
 proc icon*(icon: string; text=""; title=""; class=""; href=""): VNode =
   var c = "icon-" & icon
   if class.len > 0: c = &"{c} {class}"
src/views/tweet.nim
@@ -7,14 +7,7 @@ import renderutils
 import ".."/[types, utils, formatters]
 import general
 
-const
-  doctype = "<!DOCTYPE html>\n"
-
-proc getSmallPic(url: string): string =
-  result = url
-  if "?" notin url and not url.endsWith("placeholder.png"):
-    result &= "?name=small"
-  result = getPicUrl(result)
+const doctype = "<!DOCTYPE html>\n"
 
 proc renderMiniAvatar(user: User; prefs: Prefs): VNode =
   let url = getPicUrl(user.getUserPic("_mini"))
@@ -60,9 +53,8 @@ proc renderAlbum(tweet: Tweet): VNode =
       tdiv(class="attachment image"):
         let
           named = "name=" in photo
-          orig = photo
-          small = if named: photo else: photo & "?name=small"
-        a(href=getOrigPicUrl(orig), class="still-image", target="_blank"):
+          small = if named: photo else: photo & smallWebp
+        a(href=getOrigPicUrl(photo), class="still-image", target="_blank"):
          genImg(small)
 
 proc isPlaybackEnabled(prefs: Prefs; playbackType: VideoType): bool =
@@ -355,7 +347,7 @@ proc renderTweet*(tweet: Tweet; prefs: Prefs; path: string; class=""; index=0;
         renderQuote(tweet.quote.get(), prefs, path)
 
       if mainTweet:
-        p(class="tweet-published"): text &"{getTime(tweet)} · {tweet.source}"
+        p(class="tweet-published"): text &"{getTime(tweet)}"
 
      if tweet.mediaTags.len > 0:
        renderMediaTags(tweet.mediaTags)
tests/requirements.txt (1 change, new file)
@@ -0,0 +1 @@
+seleniumbase
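With the requirements file in place, the Selenium test dependency can be installed in one step instead of by hand:

```bash
pip install -r tests/requirements.txt
```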
tests/test_card.py
@@ -42,7 +42,7 @@ no_thumb = [
 
     ['nim_lang/status/1082989146040340480',
      'Nim in 2018: A short recap',
-     'Posted in r/programming by u/miran1',
+     'Posted by u/miran1 - 36 votes and 46 comments',
      'reddit.com']
 ]
 