Merge branch 'master' into oidc

Commit 9b7246a029, authored by Jeidnx on 2023-10-24 13:44:02 +02:00 and committed by GitHub.
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in the database)
47 changed files with 796 additions and 204 deletions


@@ -11,15 +11,15 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        java: [ 17 ]
+        java: [ 21 ]
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: set up JDK ${{ matrix.java }}
        uses: actions/setup-java@v3
        with:
          java-version: ${{ matrix.java }}
-          distribution: temurin
+          distribution: zulu
          cache: "gradle"
      - name: Run Build
        run: ./gradlew build


@@ -9,43 +9,46 @@ on:
   pull_request:
 jobs:
+  build-jdk:
+    uses: ./.github/workflows/fat-build.yml
   build-test:
     runs-on: ubuntu-latest
+    needs: build-jdk
     strategy:
       matrix:
         docker-compose-file:
           - docker-compose.yml
+          - testing/docker-compose.hsqldb.yml
           - testing/docker-compose.cockroachdb.yml
           - testing/docker-compose.yugabytedb.yml
         dockerfile:
           - Dockerfile.ci
           - Dockerfile.azul.ci
-          - Dockerfile.openj9.ci
+          #- Dockerfile.openj9.ci
           - Dockerfile.graalvm-jvm.ci
+        include:
+          - sleep: 20
+          - docker-compose-file: testing/docker-compose.cockroachdb.yml
+            sleep: 30
+          - docker-compose-file: testing/docker-compose.yugabytedb.yml
+            sleep: 120
       fail-fast: false
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+      - uses: actions/download-artifact@v3
+        with:
+          name: piped.jar
       - name: Create Version File
         run: echo $(git log -1 --date=short --pretty=format:%cd)-$(git rev-parse --short HEAD) > VERSION
-      - name: set up JDK 17
-        uses: actions/setup-java@v3
-        with:
-          java-version: 17
-          distribution: temurin
-          cache: "gradle"
-      - name: Run Build
-        run: ./gradlew shadowJar
-      - run: mv build/libs/piped-*-all.jar piped.jar
       - name: Build Image Locally
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
         with:
           context: .
           load: true
           file: ${{ matrix.dockerfile }}
           tags: 1337kavin/piped:latest
       - name: Start Docker-Compose services
-        run: docker-compose -f ${{ matrix.docker-compose-file }} up -d && sleep 20
+        run: docker-compose -f ${{ matrix.docker-compose-file }} up -d && sleep ${{ matrix.sleep }}
       - name: Run tests
         run: ./testing/api-test.sh
       - name: Collect services logs


@@ -8,13 +8,17 @@ on:
       - master
 jobs:
+  build-jdk:
+    uses: ./.github/workflows/fat-build.yml
   build-docker:
+    needs: build-jdk
     runs-on: ubuntu-latest
     strategy:
       matrix:
         include:
-          - image: 1337kavin/piped:openj9
-            dockerfile: ./Dockerfile.openj9.ci
+          # - image: 1337kavin/piped:openj9
+          # dockerfile: ./Dockerfile.openj9.ci
           - image: 1337kavin/piped:hotspot
             dockerfile: ./Dockerfile.ci
           - image: 1337kavin/piped:latest,1337kavin/piped:azul-zulu
@@ -22,34 +26,28 @@ jobs:
           - image: 1337kavin/piped:graalvm-jvm
             dockerfile: ./Dockerfile.graalvm-jvm.ci
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
+      - uses: actions/download-artifact@v3
+        with:
+          name: piped.jar
       - name: Create Version File
         run: echo $(git log -1 --date=short --pretty=format:%cd)-$(git rev-parse --short HEAD) > VERSION
-      - name: set up JDK 17
-        uses: actions/setup-java@v3
-        with:
-          java-version: 17
-          distribution: temurin
-          cache: "gradle"
-      - name: Run Build
-        run: ./gradlew shadowJar
-      - run: mv build/libs/piped-*-all.jar piped.jar
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v3
         with:
          platforms: all
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
         with:
           version: latest
       - name: Login to DockerHub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKER_USERNAME }}
           password: ${{ secrets.DOCKER_PASSWORD }}
       - name: Build and push
-        uses: docker/build-push-action@v4
+        uses: docker/build-push-action@v5
        with:
          context: .
          file: ${{ matrix.dockerfile }}


@@ -0,0 +1,82 @@
name: Docker-Compose Build and Test Migration
on:
  pull_request:
    paths:
      - "src/main/resources/changelog/**"
      - "src/main/java/me/kavin/piped/utils/obj/db/**"
jobs:
  build-new:
    uses: ./.github/workflows/fat-build.yml
  build-old:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.base.sha }}
      - name: set up JDK 21
        uses: actions/setup-java@v3
        with:
          java-version: 21
          distribution: zulu
          cache: "gradle"
      - name: Run Build
        run: ./gradlew shadowJar
      - run: mv build/libs/piped-*-all.jar piped.jar
      - uses: actions/upload-artifact@v3
        with:
          name: piped-old.jar
          path: piped.jar
  docker-build-test:
    needs: [ build-new, build-old ]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        docker-compose-file:
          - docker-compose.yml
          - testing/docker-compose.cockroachdb.yml
          - testing/docker-compose.yugabytedb.yml
        dockerfile:
          - Dockerfile.azul.ci
        include:
          - sleep: 20
          - docker-compose-file: testing/docker-compose.cockroachdb.yml
            sleep: 30
          - docker-compose-file: testing/docker-compose.yugabytedb.yml
            sleep: 120
      fail-fast: false
    steps:
      - uses: actions/checkout@v4
      - run: echo "unknown" > VERSION
      - uses: actions/download-artifact@v3
        with:
          name: piped-old.jar
      - name: Build Old Image Locally
        uses: docker/build-push-action@v5
        with:
          context: .
          load: true
          file: ${{ matrix.dockerfile }}
          tags: 1337kavin/piped:latest
      - name: Start Docker-Compose services
        run: docker-compose -f ${{ matrix.docker-compose-file }} up -d && sleep ${{ matrix.sleep }}
      - run: rm piped.jar
      - uses: actions/download-artifact@v3
        with:
          name: piped.jar
      - name: Build New Image Locally
        uses: docker/build-push-action@v5
        with:
          context: .
          load: true
          file: ${{ matrix.dockerfile }}
          tags: 1337kavin/piped:latest
      - name: Start Docker-Compose services
        run: docker-compose -f ${{ matrix.docker-compose-file }} up -d && sleep ${{ matrix.sleep }}
      - name: Run tests
        run: ./testing/api-test.sh
      - name: Collect services logs
        if: failure()
        run: docker-compose -f ${{ matrix.docker-compose-file }} logs

.github/workflows/fat-build.yml (new file)

@@ -0,0 +1,24 @@
name: Fat JAR Build
on:
  workflow_call:
jobs:
  build-and-test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: set up JDK 21
        uses: actions/setup-java@v3
        with:
          java-version: 21
          distribution: zulu
          cache: "gradle"
      - name: Run Build
        run: ./gradlew shadowJar
      - run: mv build/libs/piped-*-all.jar piped.jar
      - uses: actions/upload-artifact@v3
        with:
          name: piped.jar
          path: piped.jar


@@ -1,4 +1,4 @@
-FROM eclipse-temurin:17-jdk AS build
+FROM eclipse-temurin:21-jdk AS build
 WORKDIR /app/
@@ -7,11 +7,19 @@ COPY . /app/
 RUN --mount=type=cache,target=/root/.gradle/caches/ \
     ./gradlew shadowJar
-FROM eclipse-temurin:17-jre
+FROM eclipse-temurin:21-jre
+RUN --mount=type=cache,target=/var/cache/apt/ \
+    apt-get update && \
+    apt-get install -y --no-install-recommends \
+    curl \
+    && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
 WORKDIR /app/
-COPY hotspot-entrypoint.sh /
+COPY hotspot-entrypoint.sh docker-healthcheck.sh /
 COPY --from=build /app/build/libs/piped-1.0-all.jar /app/piped.jar
@@ -19,4 +27,5 @@ COPY VERSION .
 EXPOSE 8080
+HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 CMD /docker-healthcheck.sh
 ENTRYPOINT ["/hotspot-entrypoint.sh"]


@@ -1,4 +1,4 @@
-FROM azul/zulu-openjdk:17-latest AS build
+FROM azul/zulu-openjdk:21-latest AS build
 WORKDIR /app/
@@ -7,11 +7,19 @@ COPY . /app/
 RUN --mount=type=cache,target=/root/.gradle/caches/ \
     ./gradlew shadowJar
-FROM azul/zulu-openjdk:17-jre-headless-latest
+FROM azul/zulu-openjdk:21-jre-headless-latest
+RUN --mount=type=cache,target=/var/cache/apt/ \
+    apt-get update && \
+    apt-get install -y --no-install-recommends \
+    curl \
+    && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
 WORKDIR /app/
-COPY hotspot-entrypoint.sh /
+COPY hotspot-entrypoint.sh docker-healthcheck.sh /
 COPY --from=build /app/build/libs/piped-1.0-all.jar /app/piped.jar
@@ -19,4 +27,5 @@ COPY VERSION .
 EXPOSE 8080
+HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 CMD /docker-healthcheck.sh
 ENTRYPOINT ["/hotspot-entrypoint.sh"]


@@ -1,8 +1,16 @@
-FROM azul/zulu-openjdk:17-jre-headless-latest
+FROM azul/zulu-openjdk:21-jre-headless-latest
+RUN --mount=type=cache,target=/var/cache/apt/ \
+    apt-get update && \
+    apt-get install -y --no-install-recommends \
+    curl \
+    && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
 WORKDIR /app/
-COPY hotspot-entrypoint.sh /
+COPY hotspot-entrypoint.sh docker-healthcheck.sh /
 COPY ./piped.jar /app/piped.jar
@@ -10,4 +18,5 @@ COPY VERSION .
 EXPOSE 8080
+HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 CMD /docker-healthcheck.sh
 ENTRYPOINT ["/hotspot-entrypoint.sh"]


@@ -1,8 +1,16 @@
-FROM eclipse-temurin:17-jre
+FROM eclipse-temurin:21-jre
+RUN --mount=type=cache,target=/var/cache/apt/ \
+    apt-get update && \
+    apt-get install -y --no-install-recommends \
+    curl \
+    && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
 WORKDIR /app/
-COPY hotspot-entrypoint.sh /
+COPY hotspot-entrypoint.sh docker-healthcheck.sh /
 COPY ./piped.jar /app/piped.jar
@@ -10,4 +18,5 @@ COPY VERSION .
 EXPOSE 8080
+HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 CMD /docker-healthcheck.sh
 ENTRYPOINT ["/hotspot-entrypoint.sh"]


@@ -1,4 +1,4 @@
-FROM ghcr.io/graalvm/native-image:latest as build
+FROM container-registry.oracle.com/graalvm/native-image:latest as build
 WORKDIR /app/
@@ -17,16 +17,27 @@ RUN jlink \
 FROM debian:stable-slim
+RUN --mount=type=cache,target=/var/cache/apt/ \
+    apt-get update && \
+    apt-get install -y --no-install-recommends \
+    curl \
+    && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
 ENV JAVA_HOME=/opt/java/openjdk
 ENV PATH "${JAVA_HOME}/bin:${PATH}"
 COPY --from=build /javaruntime $JAVA_HOME
 WORKDIR /app/
+COPY docker-healthcheck.sh /
 COPY --from=build /app/build/libs/piped-1.0-all.jar /app/piped.jar
 COPY VERSION .
 EXPOSE 8080
+HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 CMD /docker-healthcheck.sh
 CMD java -jar /app/piped.jar


@@ -1,4 +1,4 @@
-FROM ghcr.io/graalvm/native-image:latest as build
+FROM container-registry.oracle.com/graalvm/native-image:latest as build
 RUN jlink \
     --add-modules java.base,java.logging,java.sql,java.management,java.xml,java.naming,java.desktop,jdk.crypto.ec \
@@ -10,16 +10,27 @@ RUN jlink \
 FROM debian:stable-slim
+RUN --mount=type=cache,target=/var/cache/apt/ \
+    apt-get update && \
+    apt-get install -y --no-install-recommends \
+    curl \
+    && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
 ENV JAVA_HOME=/opt/java/openjdk
 ENV PATH "${JAVA_HOME}/bin:${PATH}"
 COPY --from=build /javaruntime $JAVA_HOME
 WORKDIR /app/
+COPY docker-healthcheck.sh /
 COPY ./piped.jar /app/piped.jar
 COPY VERSION .
 EXPOSE 8080
+HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 CMD /docker-healthcheck.sh
 CMD java -jar /app/piped.jar


@@ -1,7 +1,6 @@
 plugins {
     id "com.github.johnrengelman.shadow" version "8.1.1"
     id "java"
-    id "io.freefair.lombok" version "8.1.0"
     id "eclipse"
 }
@@ -13,36 +12,40 @@ repositories {
 dependencies {
     implementation 'org.apache.commons:commons-lang3:3.13.0'
     implementation 'org.apache.commons:commons-text:1.10.0'
-    implementation 'commons-io:commons-io:2.12.0'
+    implementation 'commons-io:commons-io:2.14.0'
     implementation 'it.unimi.dsi:fastutil-core:8.5.12'
     implementation 'commons-codec:commons-codec:1.16.0'
     implementation 'org.bouncycastle:bcprov-jdk15on:1.70'
-    implementation 'com.github.FireMasterK.NewPipeExtractor:NewPipeExtractor:88ceba0da4a48b5f4ffecb3b5b2f36f95ec53afe'
+    implementation 'com.github.FireMasterK.NewPipeExtractor:NewPipeExtractor:48beff184a9792c4787cfa05fce577c3adf89f56'
     implementation 'com.github.FireMasterK:nanojson:9f4af3b739cc13f3d0d9d4b758bbe2b2ae7119d7'
+    implementation 'com.nimbusds:oauth2-oidc-sdk:11.5.0'
     implementation 'com.fasterxml.jackson.core:jackson-core:2.15.2'
     implementation 'com.fasterxml.jackson.core:jackson-annotations:2.15.2'
     implementation 'com.fasterxml.jackson.core:jackson-databind:2.15.2'
     implementation 'com.rometools:rome:2.1.0'
+    implementation 'com.rometools:rome-modules:2.1.0'
     implementation 'org.jsoup:jsoup:1.16.1'
     implementation 'io.activej:activej-common:5.5'
     implementation 'io.activej:activej-http:5.5'
     implementation 'io.activej:activej-boot:5.5'
     implementation 'io.activej:activej-specializer:5.5'
     implementation 'io.activej:activej-launchers-http:5.5'
+    implementation 'org.hsqldb:hsqldb:2.7.2'
     implementation 'org.postgresql:postgresql:42.6.0'
-    implementation 'org.hibernate:hibernate-core:6.2.7.Final'
-    implementation 'org.hibernate:hibernate-hikaricp:6.2.7.Final'
+    implementation 'org.hibernate:hibernate-core:6.3.1.Final'
+    implementation 'org.hibernate:hibernate-hikaricp:6.3.1.Final'
+    implementation 'org.liquibase:liquibase-core:4.23.2'
+    implementation('org.liquibase.ext:liquibase-yugabytedb:4.23.2') { exclude group: 'org.liquibase' }
     implementation 'com.zaxxer:HikariCP:5.0.1'
-    implementation 'org.springframework.security:spring-security-crypto:6.1.2'
+    implementation 'org.springframework.security:spring-security-crypto:6.1.4'
     implementation 'commons-logging:commons-logging:1.2'
     implementation(platform("com.squareup.okhttp3:okhttp-bom:4.11.0"))
     implementation 'com.squareup.okhttp3:okhttp'
     implementation 'com.squareup.okhttp3:okhttp-brotli'
-    implementation 'com.nimbusds:oauth2-oidc-sdk:10.9.1'
-    implementation 'io.sentry:sentry:6.28.0'
-    implementation 'rocks.kavin:reqwest4j:1.0.7'
-    implementation 'io.minio:minio:8.5.4'
+    implementation 'io.sentry:sentry:6.30.0'
+    implementation 'rocks.kavin:reqwest4j:1.0.12'
+    implementation 'io.minio:minio:8.5.6'
+    compileOnly 'org.projectlombok:lombok:1.18.30'
+    annotationProcessor 'org.projectlombok:lombok:1.18.30'
 }
 shadowJar {
@@ -59,5 +62,5 @@ jar {
 group = 'me.kavin.piped'
 version = '1.0'
-sourceCompatibility = JavaVersion.VERSION_17
-targetCompatibility = JavaVersion.VERSION_17
+sourceCompatibility = JavaVersion.VERSION_21
+targetCompatibility = JavaVersion.VERSION_21


@@ -8,6 +8,9 @@ PROXY_PART:https://pipedproxy-cdg.kavin.rocks
 # Outgoing proxy to be used by reqwest4j - eg: socks5://127.0.0.1:1080
 #REQWEST_PROXY: socks5://127.0.0.1:1080
+# Optional proxy username and password
+#REQWEST_PROXY_USER: username
+#REQWEST_PROXY_PASS: password
 # Captcha Parameters
 CAPTCHA_BASE_URL:https://api.capmonster.cloud/


@@ -9,7 +9,7 @@ services:
     depends_on:
       - postgres
   postgres:
-    image: postgres:15-alpine
+    image: postgres:16-alpine
     restart: unless-stopped
     volumes:
       - ./data/db:/var/lib/postgresql/data

docker-healthcheck.sh (new executable file)

@@ -0,0 +1,6 @@
#!/usr/bin/env sh
# If PORT env var is set, use it, otherwise default to 8080
PORT=${PORT:-8080}
curl -f http://localhost:$PORT/healthcheck || exit 1



@@ -1,6 +1,6 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://downloads.gradle.org/distributions/gradle-8.2.1-bin.zip
+distributionUrl=https\://downloads.gradle.org/distributions/gradle-8.4-bin.zip
 networkTimeout=10000
 validateDistributionUrl=true
 zipStoreBase=GRADLE_USER_HOME

gradlew (vendored)

@@ -83,7 +83,8 @@ done
 # This is normally unused
 # shellcheck disable=SC2034
 APP_BASE_NAME=${0##*/}
-APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
+# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
+APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit
 # Use the maximum available, or set MAX_FD != -1 to use that value.
 MAX_FD=maximum
@@ -144,7 +145,7 @@ if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
     case $MAX_FD in #(
       max*)
         # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
-        # shellcheck disable=SC3045
+        # shellcheck disable=SC2039,SC3045
         MAX_FD=$( ulimit -H -n ) ||
             warn "Could not query maximum file descriptor limit"
     esac
@@ -152,7 +153,7 @@ if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
       '' | soft) :;; #(
       *)
         # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
-        # shellcheck disable=SC3045
+        # shellcheck disable=SC2039,SC3045
         ulimit -n "$MAX_FD" ||
             warn "Could not set maximum file descriptor limit to $MAX_FD"
     esac
@@ -201,11 +202,11 @@ fi
 # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
 DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
-# Collect all arguments for the java command;
-#   * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
-#     shell script including quotes and variable substitutions, so put them in
-#     double quotes to make sure that they get re-expanded; and
-#   * put everything else in single quotes, so that it's not re-expanded.
+# Collect all arguments for the java command:
+#   * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
+#     and any embedded shellness will be escaped.
+#   * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
+#     treated as '${Hostname}' itself on the command line.
 set -- \
         "-Dorg.gradle.appname=$APP_BASE_NAME" \


@@ -18,9 +18,8 @@ import org.hibernate.StatelessSession;
 import org.schabi.newpipe.extractor.NewPipe;
 import org.schabi.newpipe.extractor.localization.ContentCountry;
 import org.schabi.newpipe.extractor.localization.Localization;
-import org.schabi.newpipe.extractor.services.youtube.YoutubeThrottlingDecrypter;
+import org.schabi.newpipe.extractor.services.youtube.YoutubeJavaScriptPlayerManager;
 import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeStreamExtractor;
-import rocks.kavin.reqwest4j.ReqwestUtils;
 import java.util.*;
 import java.util.concurrent.CompletableFuture;
@@ -47,17 +46,24 @@ public class Main {
         Injector.useSpecializer();
-        Multithreading.runAsync(() -> new Thread(new SyncRunner(
+        try {
+            LiquibaseHelper.init();
+        } catch (Exception e) {
+            ExceptionHandler.handle(e);
+            System.exit(1);
+        }
+        Multithreading.runAsync(() -> Thread.ofVirtual().start(new SyncRunner(
                 new OkHttpClient.Builder().readTimeout(60, TimeUnit.SECONDS).build(),
                 MATRIX_SERVER,
                 MatrixHelper.MATRIX_TOKEN)
-        ).start());
+        ));
         new Timer().scheduleAtFixedRate(new TimerTask() {
             @Override
             public void run() {
-                System.out.printf("ThrottlingCache: %o entries%n", YoutubeThrottlingDecrypter.getCacheSize());
-                YoutubeThrottlingDecrypter.clearCache();
+                System.out.printf("ThrottlingCache: %o entries%n", YoutubeJavaScriptPlayerManager.getThrottlingParametersCacheSize());
+                YoutubeJavaScriptPlayerManager.clearThrottlingParametersCache();
             }
         }, 0, TimeUnit.MINUTES.toMillis(60));


@@ -53,6 +53,8 @@ public class Constants {
     public static final String PUBSUB_HUB_URL;
     public static final String REQWEST_PROXY;
+    public static final String REQWEST_PROXY_USER;
+    public static final String REQWEST_PROXY_PASS;
     public static final String FRONTEND_URL;
@@ -134,7 +136,9 @@ public class Constants {
             PUBSUB_URL = getProperty(prop, "PUBSUB_URL", PUBLIC_URL);
             PUBSUB_HUB_URL = getProperty(prop, "PUBSUB_HUB_URL", "https://pubsubhubbub.appspot.com/subscribe");
             REQWEST_PROXY = getProperty(prop, "REQWEST_PROXY");
-            ReqwestUtils.init(REQWEST_PROXY);
+            REQWEST_PROXY_USER = getProperty(prop, "REQWEST_PROXY_USER");
+            REQWEST_PROXY_PASS = getProperty(prop, "REQWEST_PROXY_PASS");
+            ReqwestUtils.init(REQWEST_PROXY, REQWEST_PROXY_USER, REQWEST_PROXY_PASS);
             FRONTEND_URL = getProperty(prop, "FRONTEND_URL", "https://piped.video");
             COMPROMISED_PASSWORD_CHECK = Boolean.parseBoolean(getProperty(prop, "COMPROMISED_PASSWORD_CHECK", "true"));
             DISABLE_REGISTRATION = Boolean.parseBoolean(getProperty(prop, "DISABLE_REGISTRATION", "false"));


@@ -97,53 +97,7 @@ public class ServerLauncher extends MultithreadedHttpServerLauncher {
         })).map(POST, "/webhooks/pubsub", AsyncServlet.ofBlocking(executor, request -> {
             try {
-                SyndFeed feed = new SyndFeedInput().build(
-                        new InputSource(new ByteArrayInputStream(request.loadBody().getResult().asArray())));
-                Multithreading.runAsyncLimited(() -> {
-                    for (var entry : feed.getEntries()) {
-                        String url = entry.getLinks().get(0).getHref();
-                        String videoId = StringUtils.substring(url, -11);
-                        try (StatelessSession s = DatabaseSessionFactory.createStatelessSession()) {
-                            if (DatabaseHelper.doesVideoExist(s, videoId))
-                                continue;
-                        }
-                        Multithreading.runAsyncLimited(() -> {
-                            try {
-                                Sentry.setExtra("videoId", videoId);
-                                var extractor = YOUTUBE_SERVICE.getStreamExtractor("https://youtube.com/watch?v=" + videoId);
-                                extractor.fetchPage();
-                                Multithreading.runAsync(() -> {
-                                    DateWrapper uploadDate;
-                                    try {
-                                        uploadDate = extractor.getUploadDate();
-                                    } catch (ParsingException e) {
-                                        throw new RuntimeException(e);
-                                    }
-                                    if (uploadDate != null && System.currentTimeMillis() - uploadDate.offsetDateTime().toInstant().toEpochMilli() < TimeUnit.DAYS.toMillis(Constants.FEED_RETENTION)) {
-                                        try {
-                                            MatrixHelper.sendEvent("video.piped.stream.info", new FederatedVideoInfo(
-                                                    StringUtils.substring(extractor.getUrl(), -11), StringUtils.substring(extractor.getUploaderUrl(), -24),
-                                                    extractor.getName(),
-                                                    extractor.getLength(), extractor.getViewCount())
-                                            );
-                                        } catch (Exception e) {
-                                            ExceptionHandler.handle(e);
-                                        }
-                                    }
-                                });
-                                VideoHelpers.handleNewVideo(extractor, entry.getPublishedDate().getTime(), null);
-                            } catch (Exception e) {
-                                ExceptionHandler.handle(e);
-                            }
-                        });
-                    }
-                });
+                PubSubHandlers.handlePubSub(request.loadBody().getResult().asArray());
                 return HttpResponse.ofCode(204);


@@ -32,7 +32,7 @@ import static me.kavin.piped.consts.Constants.YOUTUBE_SERVICE;
 import static me.kavin.piped.consts.Constants.mapper;
 import static me.kavin.piped.utils.CollectionUtils.collectPreloadedTabs;
 import static me.kavin.piped.utils.CollectionUtils.collectRelatedItems;
-import static me.kavin.piped.utils.URLUtils.rewriteURL;
+import static me.kavin.piped.utils.URLUtils.getLastThumbnail;
 public class ChannelHandlers {
     public static byte[] channelResponse(String channelPath) throws Exception {
@@ -77,7 +77,7 @@ public class ChannelHandlers {
         Multithreading.runAsync(() -> {
             try {
                 MatrixHelper.sendEvent("video.piped.channel.info", new FederatedChannelInfo(
-                        info.getId(), StringUtils.abbreviate(info.getName(), 100), info.getAvatarUrl(), info.isVerified())
+                        info.getId(), StringUtils.abbreviate(info.getName(), 100), info.getAvatars().isEmpty() ? null : info.getAvatars().getLast().getUrl(), info.isVerified())
                 );
             } catch (IOException e) {
                 throw new RuntimeException(e);
@@ -93,7 +93,7 @@ public class ChannelHandlers {
             if (channel != null) {
-                ChannelHelpers.updateChannel(s, channel, StringUtils.abbreviate(info.getName(), 100), info.getAvatarUrl(), info.isVerified());
+                ChannelHelpers.updateChannel(s, channel, StringUtils.abbreviate(info.getName(), 100), info.getAvatars().isEmpty() ? null : info.getAvatars().getLast().getUrl(), info.isVerified());
                 Set<String> ids = tabInfo.getRelatedItems()
                         .stream()
@@ -159,8 +159,8 @@ public class ChannelHandlers {
             }
         }).toList();
-        final Channel channel = new Channel(info.getId(), info.getName(), rewriteURL(info.getAvatarUrl()),
-                rewriteURL(info.getBannerUrl()), info.getDescription(), info.getSubscriberCount(), info.isVerified(),
+        final Channel channel = new Channel(info.getId(), info.getName(), getLastThumbnail(info.getAvatars()),
+                getLastThumbnail(info.getBanners()), info.getDescription(), info.getSubscriberCount(), info.isVerified(),
                 nextpage, relatedStreams, tabs);
         return mapper.writeValueAsBytes(channel);
@@ -210,6 +210,57 @@ public class ChannelHandlers {
         List<ContentItem> items = collectRelatedItems(info.getRelatedItems());
+        Multithreading.runAsync(() -> {
+            var channel = DatabaseHelper.getChannelFromId(info.getId());
+            if (channel != null) {
+                try (StatelessSession s = DatabaseSessionFactory.createStatelessSession()) {
+                    var streamInfoItems = info.getRelatedItems()
+                            .stream()
+                            .parallel()
+                            .filter(StreamInfoItem.class::isInstance)
+                            .map(StreamInfoItem.class::cast)
+                            .toList();
+                    var channelIds = streamInfoItems
+                            .stream()
+                            .map(item -> {
+                                try {
+                                    return YOUTUBE_SERVICE.getStreamLHFactory().getId(item.getUrl());
+                                } catch (ParsingException e) {
+                                    throw new RuntimeException(e);
+                                }
+                            }).collect(Collectors.toUnmodifiableSet());
+                    List<String> videoIdsPresent = DatabaseHelper.getVideosFromIds(s, channelIds)
+                            .stream()
+                            .map(Video::getId)
+                            .toList();
+                    streamInfoItems
+                            .stream()
+                            .parallel()
+                            .forEach(item -> {
+                                try {
+                                    String id = YOUTUBE_SERVICE.getStreamLHFactory().getId(item.getUrl());
+                                    if (videoIdsPresent.contains(id))
+                                        VideoHelpers.updateVideo(id, item);
+                                    else if (item.getUploadDate() != null) {
+                                        // shorts tab doesn't have upload date
+                                        // we don't want to fetch each video's upload date
+                                        long time = item.getUploadDate().offsetDateTime().toInstant().toEpochMilli();
+                                        if ((System.currentTimeMillis() - time) < TimeUnit.DAYS.toMillis(Constants.FEED_RETENTION))
+                                            VideoHelpers.handleNewVideo(item.getUrl(), time, channel);
+                                    }
+                                } catch (Exception e) {
+                                    throw new RuntimeException(e);
+                                }
+                            });
+                }
+            }
+        });
         String nextpage = null;
         if (info.hasNextPage()) {
             Page page = info.getNextPage();


@@ -30,8 +30,7 @@ import static java.nio.charset.StandardCharsets.UTF_8;
 import static me.kavin.piped.consts.Constants.YOUTUBE_SERVICE;
 import static me.kavin.piped.consts.Constants.mapper;
 import static me.kavin.piped.utils.CollectionUtils.collectRelatedItems;
-import static me.kavin.piped.utils.URLUtils.rewriteURL;
-import static me.kavin.piped.utils.URLUtils.substringYouTube;
+import static me.kavin.piped.utils.URLUtils.*;
 public class PlaylistHandlers {
     public static byte[] playlistResponse(String playlistId) throws Exception {
@@ -60,10 +59,10 @@ public class PlaylistHandlers {
             nextpage = mapper.writeValueAsString(page);
         }
-        final Playlist playlist = new Playlist(info.getName(), rewriteURL(info.getThumbnailUrl()),
-                info.getDescription().getContent(), rewriteURL(info.getBannerUrl()), nextpage,
+        final Playlist playlist = new Playlist(info.getName(), getLastThumbnail(info.getThumbnails()),
+                info.getDescription().getContent(), getLastThumbnail(info.getBanners()), nextpage,
                 info.getUploaderName().isEmpty() ? null : info.getUploaderName(),
-                substringYouTube(info.getUploaderUrl()), rewriteURL(info.getUploaderAvatarUrl()),
+                substringYouTube(info.getUploaderUrl()), getLastThumbnail(info.getUploaderAvatars()),
                 (int) info.getStreamCount(), relatedStreams);
         return mapper.writeValueAsBytes(playlist);


@@ -0,0 +1,100 @@
package me.kavin.piped.server.handlers;

import com.rometools.rome.feed.synd.SyndFeed;
import com.rometools.rome.io.SyndFeedInput;
import io.sentry.Sentry;
import me.kavin.piped.consts.Constants;
import me.kavin.piped.utils.*;
import me.kavin.piped.utils.obj.MatrixHelper;
import me.kavin.piped.utils.obj.federation.FederatedVideoInfo;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.StatelessSession;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.localization.DateWrapper;
import org.xml.sax.InputSource;

import java.io.ByteArrayInputStream;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

import static me.kavin.piped.consts.Constants.YOUTUBE_SERVICE;

public class PubSubHandlers {

    private static final LinkedBlockingQueue<String> pubSubQueue = new LinkedBlockingQueue<>();

    public static void handlePubSub(byte[] body) throws Exception {

        SyndFeed feed = new SyndFeedInput().build(new InputSource(new ByteArrayInputStream(body)));

        for (var entry : feed.getEntries()) {
            String url = entry.getLinks().get(0).getHref();
            String videoId = StringUtils.substring(url, -11);
            long publishedDate = entry.getPublishedDate().getTime();

            String str = videoId + ":" + publishedDate;

            if (pubSubQueue.contains(str))
                continue;

            pubSubQueue.put(str);
        }
    }

    static {
        for (int i = 0; i < Runtime.getRuntime().availableProcessors(); i++) {
            new Thread(() -> {
                try {
                    while (true) {
                        String str = pubSubQueue.take();

                        String videoId = StringUtils.substringBefore(str, ":");
                        long publishedDate = Long.parseLong(StringUtils.substringAfter(str, ":"));

                        try (StatelessSession s = DatabaseSessionFactory.createStatelessSession()) {
                            if (DatabaseHelper.doesVideoExist(s, videoId))
                                continue;
                        }

                        try {
                            Sentry.setExtra("videoId", videoId);
                            var extractor = YOUTUBE_SERVICE.getStreamExtractor("https://youtube.com/watch?v=" + videoId);
                            extractor.fetchPage();

                            Multithreading.runAsync(() -> {
                                DateWrapper uploadDate;
                                try {
                                    uploadDate = extractor.getUploadDate();
                                } catch (ParsingException e) {
                                    throw new RuntimeException(e);
                                }
                                if (uploadDate != null && System.currentTimeMillis() - uploadDate.offsetDateTime().toInstant().toEpochMilli() < TimeUnit.DAYS.toMillis(Constants.FEED_RETENTION)) {
                                    try {
                                        MatrixHelper.sendEvent("video.piped.stream.info", new FederatedVideoInfo(
                                                StringUtils.substring(extractor.getUrl(), -11), StringUtils.substring(extractor.getUploaderUrl(), -24),
                                                extractor.getName(),
                                                extractor.getLength(), extractor.getViewCount())
                                        );
                                    } catch (Exception e) {
                                        ExceptionHandler.handle(e);
                                    }
                                }
                            });

                            VideoHelpers.handleNewVideo(extractor, publishedDate, null);
                        } catch (Exception e) {
                            ExceptionHandler.handle(e);
                        }
                    }
                } catch (Exception e) {
                    ExceptionHandler.handle(e);
                }
            }, "PubSub-Worker-" + i).start();
        }
    }
}


@@ -36,8 +36,7 @@ import java.util.concurrent.TimeoutException;
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static me.kavin.piped.consts.Constants.YOUTUBE_SERVICE;
 import static me.kavin.piped.consts.Constants.mapper;
-import static me.kavin.piped.utils.URLUtils.rewriteURL;
-import static me.kavin.piped.utils.URLUtils.substringYouTube;
+import static me.kavin.piped.utils.URLUtils.*;
 import static org.schabi.newpipe.extractor.NewPipe.getPreferredContentCountry;
 import static org.schabi.newpipe.extractor.NewPipe.getPreferredLocalization;
 import static org.schabi.newpipe.extractor.services.youtube.YoutubeParsingHelper.getJsonPostResponse;
@@ -342,10 +341,10 @@ public class StreamHandlers {
                 if (comment.getReplies() != null)
                     repliespage = mapper.writeValueAsString(comment.getReplies());
-                comments.add(new Comment(comment.getUploaderName(), rewriteURL(comment.getUploaderAvatarUrl()),
+                comments.add(new Comment(comment.getUploaderName(), getLastThumbnail(comment.getUploaderAvatars()),
                         comment.getCommentId(), Optional.ofNullable(comment.getCommentText()).map(Description::getContent).orElse(null), comment.getTextualUploadDate(),
                         substringYouTube(comment.getUploaderUrl()), repliespage, comment.getLikeCount(), comment.getReplyCount(),
-                        comment.isHeartedByUploader(), comment.isPinned(), comment.isUploaderVerified()));
+                        comment.isHeartedByUploader(), comment.isPinned(), comment.isUploaderVerified(), comment.hasCreatorReply()));
             } catch (JsonProcessingException e) {
                 ExceptionHandler.handle(e);
             }
@@ -380,10 +379,10 @@ public class StreamHandlers {
                 if (comment.getReplies() != null)
                     repliespage = mapper.writeValueAsString(comment.getReplies());
-                comments.add(new Comment(comment.getUploaderName(), rewriteURL(comment.getUploaderAvatarUrl()),
+                comments.add(new Comment(comment.getUploaderName(), getLastThumbnail(comment.getUploaderAvatars()),
                         comment.getCommentId(), Optional.ofNullable(comment.getCommentText()).map(Description::getContent).orElse(null), comment.getTextualUploadDate(),
                         substringYouTube(comment.getUploaderUrl()), repliespage, comment.getLikeCount(), comment.getReplyCount(),
-                        comment.isHeartedByUploader(), comment.isPinned(), comment.isUploaderVerified()));
+                        comment.isHeartedByUploader(), comment.isPinned(), comment.isUploaderVerified(), comment.hasCreatorReply()));
             } catch (JsonProcessingException e) {
                 ExceptionHandler.handle(e);
             }


@@ -9,7 +9,6 @@ import com.rometools.rome.io.SyndFeedOutput;
 import it.unimi.dsi.fastutil.objects.Object2ObjectOpenHashMap;
 import it.unimi.dsi.fastutil.objects.ObjectArrayList;
 import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet;
-import jakarta.persistence.criteria.JoinType;
 import me.kavin.piped.consts.Constants;
 import me.kavin.piped.utils.*;
 import me.kavin.piped.utils.obj.ContentItem;
@@ -23,7 +22,7 @@ import me.kavin.piped.utils.resp.AuthenticationFailureResponse;
 import me.kavin.piped.utils.resp.InvalidRequestResponse;
 import org.apache.commons.lang3.StringUtils;
 import org.hibernate.Session;
-import org.hibernate.internal.util.ExceptionHelper;
+import org.hibernate.StatelessSession;
 import org.schabi.newpipe.extractor.Page;
 import org.schabi.newpipe.extractor.exceptions.ExtractionException;
 import org.schabi.newpipe.extractor.playlist.PlaylistInfo;
@@ -269,7 +268,7 @@ public class AuthPlaylistHandlers {
                     channel = DatabaseHelper.saveChannel(channelId);
                 }
-                video = new PlaylistVideo(videoId, info.getName(), info.getThumbnailUrl(), info.getDuration(), channel);
+                video = new PlaylistVideo(videoId, info.getName(), info.getThumbnails().getLast().getUrl(), info.getDuration(), channel);
                 var tr = s.beginTransaction();
                 try {
@@ -312,11 +311,16 @@ public class AuthPlaylistHandlers {
         if (StringUtils.isBlank(session) || StringUtils.isBlank(playlistId))
             ExceptionHandler.throwErrorResponse(new InvalidRequestResponse("session and playlistId are required parameters"));
-        try (Session s = DatabaseSessionFactory.createSession()) {
+        if (index < 0)
+            return mapper.writeValueAsBytes(mapper.createObjectNode()
+                    .put("error", "Video Index out of bounds"));
+        long internalId;
+        try (StatelessSession s = DatabaseSessionFactory.createStatelessSession()) {
             var cb = s.getCriteriaBuilder();
             var query = cb.createQuery(me.kavin.piped.utils.obj.db.Playlist.class);
             var root = query.from(me.kavin.piped.utils.obj.db.Playlist.class);
-            root.fetch("videos", JoinType.RIGHT);
             query.where(cb.equal(root.get("playlist_id"), UUID.fromString(playlistId)));
             var playlist = s.createQuery(query).uniqueResult();
@@ -327,19 +331,31 @@ public class AuthPlaylistHandlers {
             if (playlist.getOwner().getId() != DatabaseHelper.getUserFromSession(session).getId())
                 return mapper.writeValueAsBytes(mapper.createObjectNode()
                         .put("error", "You are not the owner this playlist"));
-            if (index < 0 || index >= playlist.getVideos().size())
-                return mapper.writeValueAsBytes(mapper.createObjectNode()
-                        .put("error", "Video Index out of bounds"));
-            playlist.getVideos().remove(index);
+            internalId = playlist.getId();
+        }
+        try (Session s = DatabaseSessionFactory.createSession()) {
             var tr = s.beginTransaction();
-            s.merge(playlist);
-            tr.commit();
-            return mapper.writeValueAsBytes(new AcceptedResponse());
+            var updated = s.createNativeMutationQuery("DELETE FROM playlists_videos_ids WHERE playlist_id = :playlistId AND videos_order = :index")
+                    .setParameter("playlistId", internalId)
+                    .setParameter("index", index)
+                    .executeUpdate();
+            if (updated > 0) {
+                s.createNativeMutationQuery("UPDATE playlists_videos_ids SET videos_order = videos_order - 1 WHERE playlist_id = :playlistId AND videos_order > :index")
+                        .setParameter("playlistId", internalId)
+                        .setParameter("index", index)
+                        .executeUpdate();
+            } else
+                return mapper.writeValueAsBytes(mapper.createObjectNode()
+                        .put("error", "Video Index not found"));
+            tr.commit();
         }
+        return mapper.writeValueAsBytes(new AcceptedResponse());
     }
     public static byte[] clearPlaylistResponse(String session, String playlistId) throws IOException {
@@ -386,7 +402,7 @@ public class AuthPlaylistHandlers {
             PlaylistInfo info = PlaylistInfo.getInfo(url);
-            var playlist = new me.kavin.piped.utils.obj.db.Playlist(info.getName(), user, info.getThumbnailUrl());
+            var playlist = new me.kavin.piped.utils.obj.db.Playlist(info.getName(), user, info.getThumbnails().getLast().getUrl());
             List<StreamInfoItem> videos = new ObjectArrayList<>(info.getRelatedItems());
@@ -435,7 +451,7 @@ public class AuthPlaylistHandlers {
                 var channel = channelMap.get(channelId);
-                playlist.getVideos().add(videoMap.computeIfAbsent(videoId, (key) -> new PlaylistVideo(videoId, video.getName(), video.getThumbnailUrl(), video.getDuration(), channel)));
+                playlist.getVideos().add(videoMap.computeIfAbsent(videoId, (key) -> new PlaylistVideo(videoId, video.getName(), video.getThumbnails().getLast().getUrl(), video.getDuration(), channel)));
             });
             var tr = s.beginTransaction();


@@ -1,5 +1,7 @@
 package me.kavin.piped.utils;
+import com.rometools.modules.mediarss.MediaEntryModuleImpl;
+import com.rometools.modules.mediarss.types.*;
 import com.rometools.rome.feed.synd.*;
 import me.kavin.piped.consts.Constants;
 import me.kavin.piped.utils.obj.db.Channel;
@@ -11,6 +13,7 @@ import org.hibernate.StatelessSession;
 import java.io.IOException;
 import java.net.MalformedURLException;
+import java.net.URI;
 import java.net.URL;
 import java.util.Collections;
 import java.util.Date;
@@ -79,6 +82,7 @@ public class ChannelHelpers {
         entry.setAuthors(Collections.singletonList(person));
         entry.setLink(Constants.FRONTEND_URL + "/watch?v=" + video.getId());
+        entry.setUri(Constants.FRONTEND_URL + "/watch?v=" + video.getId());
         entry.setTitle(video.getTitle());
         entry.setPublishedDate(new Date(video.getUploaded()));
@@ -95,6 +99,23 @@ public class ChannelHelpers {
         entry.setContents(List.of(thumbnail, content));
+        // the Media RSS content for embedding videos starts here
+        // see https://www.rssboard.org/media-rss#media-content
+        String playerUrl = Constants.FRONTEND_URL + "/embed/" + video.getId();
+        MediaContent media = new MediaContent(new PlayerReference(URI.create(playerUrl)));
+        media.setDuration(video.getDuration());
+        Metadata metadata = new Metadata();
+        metadata.setTitle(video.getTitle());
+        Thumbnail metadataThumbnail = new Thumbnail(URI.create(video.getThumbnail()));
+        metadata.setThumbnail(new Thumbnail[]{ metadataThumbnail });
+        media.setMetadata(metadata);
+        MediaEntryModuleImpl mediaModule = new MediaEntryModuleImpl();
+        mediaModule.setMediaContents(new MediaContent[]{ media });
+        entry.getModules().add(mediaModule);
         return entry;
     }
 }


@@ -71,7 +71,7 @@ public class CollectionUtils {
         return new Streams(info.getName(), info.getDescription().getContent(),
                 info.getTextualUploadDate(), info.getUploaderName(), substringYouTube(info.getUploaderUrl()),
-                rewriteURL(info.getUploaderAvatarUrl()), rewriteURL(info.getThumbnailUrl()), info.getDuration(),
+                getLastThumbnail(info.getUploaderAvatars()), getLastThumbnail(info.getThumbnails()), info.getDuration(),
                 info.getViewCount(), info.getLikeCount(), info.getDislikeCount(), info.getUploaderSubscriberCount(), info.isUploaderVerified(),
                 audioStreams, videoStreams, relatedStreams, subtitles, livestream, rewriteVideoURL(info.getHlsUrl()),
                 rewriteVideoURL(info.getDashMpdUrl()), null, info.getCategory(), info.getLicence(),
@@ -101,9 +101,9 @@ public class CollectionUtils {
             StreamInfoItem item = (StreamInfoItem) o;
             return new StreamItem(substringYouTube(item.getUrl()), item.getName(),
-                    rewriteURL(item.getThumbnailUrl()),
+                    getLastThumbnail(item.getThumbnails()),
                     item.getUploaderName(), substringYouTube(item.getUploaderUrl()),
-                    rewriteURL(item.getUploaderAvatarUrl()), item.getTextualUploadDate(),
+                    getLastThumbnail(item.getUploaderAvatars()), item.getTextualUploadDate(),
                     item.getShortDescription(), item.getDuration(),
                     item.getViewCount(), item.getUploadDate() != null ?
                             item.getUploadDate().offsetDateTime().toInstant().toEpochMilli() : -1,
@@ -115,7 +115,7 @@ public class CollectionUtils {
             PlaylistInfoItem item = (PlaylistInfoItem) o;
             return new PlaylistItem(substringYouTube(item.getUrl()), item.getName(),
-                    rewriteURL(item.getThumbnailUrl()),
+                    getLastThumbnail(item.getThumbnails()),
                     item.getUploaderName(), substringYouTube(item.getUploaderUrl()),
                     item.isUploaderVerified(),
                     item.getPlaylistType().name(), item.getStreamCount());
@@ -126,7 +126,7 @@ public class CollectionUtils {
             ChannelInfoItem item = (ChannelInfoItem) o;
             return new ChannelItem(substringYouTube(item.getUrl()), item.getName(),
-                    rewriteURL(item.getThumbnailUrl()),
+                    getLastThumbnail(item.getThumbnails()),
                     item.getDescription(), item.getSubscriberCount(), item.getStreamCount(),
                     item.isVerified());
         }


@@ -192,7 +192,7 @@ public class DatabaseHelper {
         }
         var channel = new Channel(channelId, StringUtils.abbreviate(info.getName(), 100),
-                info.getAvatarUrl(), info.isVerified());
+                info.getAvatars().isEmpty() ? null : info.getAvatars().getLast().getUrl(), info.isVerified());
         try (StatelessSession s = DatabaseSessionFactory.createStatelessSession()) {
             var tr = s.beginTransaction();
@@ -214,9 +214,11 @@ public class DatabaseHelper {
         CollectionUtils.collectPreloadedTabs(info.getTabs())
                 .stream()
                 .parallel()
-                .map(tab -> {
+                .mapMulti((tab, consumer) -> {
                     try {
-                        return ChannelTabInfo.getInfo(YOUTUBE_SERVICE, tab).getRelatedItems();
+                        ChannelTabInfo.getInfo(YOUTUBE_SERVICE, tab)
+                                .getRelatedItems()
+                                .forEach(consumer);
                     } catch (ExtractionException | IOException e) {
                         throw new RuntimeException(e);
                     }
@@ -224,11 +226,11 @@ public class DatabaseHelper {
                 .filter(StreamInfoItem.class::isInstance)
                 .map(StreamInfoItem.class::cast)
                 .forEach(item -> {
                     long time = item.getUploadDate() != null
                             ? item.getUploadDate().offsetDateTime().toInstant().toEpochMilli()
                             : System.currentTimeMillis();
                     if ((System.currentTimeMillis() - time) < TimeUnit.DAYS.toMillis(Constants.FEED_RETENTION))
                         VideoHelpers.handleNewVideo(item.getUrl(), time, channel);
                 });
         });
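An editorial aside on the API used above: the refactor swaps a map step that returned whole collections for JDK 16+'s Stream.mapMulti, which hands the mapper a Consumer so it can push zero or more output elements directly instead of allocating an intermediate stream or list per tab. A minimal, self-contained sketch of the same pattern (illustrative only, not Piped code; assumes JDK 16 or newer):

import java.util.List;

public class MapMultiSketch {
    public static void main(String[] args) {
        // Hypothetical stand-in for "tabs" that each yield several related items.
        List<List<String>> tabs = List.of(List.of("a", "b"), List.of(), List.of("c"));

        // mapMulti flattens by pushing elements to the downstream consumer,
        // skipping empty inputs without creating a Stream per element.
        List<String> items = tabs.stream()
                .<String>mapMulti((tab, consumer) -> tab.forEach(consumer))
                .toList();

        System.out.println(items); // prints [a, b, c]
    }
}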


@@ -0,0 +1,50 @@
package me.kavin.piped.utils;

import liquibase.Liquibase;
import liquibase.Scope;
import liquibase.command.CommandScope;
import liquibase.command.core.UpdateCommandStep;
import liquibase.command.core.helpers.DbUrlConnectionCommandStep;
import liquibase.database.Database;
import liquibase.database.DatabaseFactory;
import liquibase.database.jvm.JdbcConnection;
import liquibase.resource.ClassLoaderResourceAccessor;
import me.kavin.piped.consts.Constants;

import java.sql.DriverManager;
import java.util.HashMap;
import java.util.Map;

public class LiquibaseHelper {

    public static void init() throws Exception {

        String url = Constants.hibernateProperties.get("hibernate.connection.url");
        String username = Constants.hibernateProperties.get("hibernate.connection.username");
        String password = Constants.hibernateProperties.get("hibernate.connection.password");

        // ensure postgres driver is loaded
        DriverManager.registerDriver(new org.postgresql.Driver());

        // register YugabyteDB database
        DatabaseFactory.getInstance().register(new liquibase.ext.yugabytedb.database.YugabyteDBDatabase());

        Database database = DatabaseFactory.getInstance().findCorrectDatabaseImplementation(new JdbcConnection(DriverManager.getConnection(url, username, password)));

        try (Liquibase liquibase = new Liquibase("changelog/db.changelog-master.xml", new ClassLoaderResourceAccessor(), database)) {
            Map<String, Object> scopeObjects = new HashMap<>();
            scopeObjects.put(Scope.Attr.database.name(), liquibase.getDatabase());
            scopeObjects.put(Scope.Attr.resourceAccessor.name(), liquibase.getResourceAccessor());

            Scope.child(scopeObjects, () -> {
                CommandScope updateCommand = new CommandScope(UpdateCommandStep.COMMAND_NAME);
                updateCommand.addArgumentValue(DbUrlConnectionCommandStep.DATABASE_ARG, liquibase.getDatabase());
                updateCommand.addArgumentValue(UpdateCommandStep.CHANGELOG_FILE_ARG, liquibase.getChangeLogFile());
                updateCommand.execute();
            });
        }
    }
}


@@ -5,7 +5,7 @@ import java.util.function.Supplier;
 public class Multithreading {
-    private static final ExecutorService es = Executors.newCachedThreadPool();
+    private static final ExecutorService es = Executors.newVirtualThreadPerTaskExecutor();
     private static final ExecutorService esLimited = Executors
             .newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 8);
     private static final ExecutorService esLimitedPubSub = Executors
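Another aside: together with the Java 21 bump, the merge moves background work onto virtual threads, both the Thread.ofVirtual().start(...) call in Main and the newVirtualThreadPerTaskExecutor() above. A short, self-contained sketch of those two JDK 21 APIs (illustrative only, not project code):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class VirtualThreadSketch {
    public static void main(String[] args) throws InterruptedException {
        // One-off task on a virtual thread, roughly what replaces `new Thread(r).start()`.
        Thread vt = Thread.ofVirtual().start(() -> System.out.println("one-off task"));
        vt.join();

        // Executor that spawns a fresh virtual thread per submitted task,
        // used here in place of newCachedThreadPool().
        try (ExecutorService es = Executors.newVirtualThreadPerTaskExecutor()) {
            es.submit(() -> System.out.println("submitted task"));
        } // close() waits for submitted tasks before returning
    }
}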


@ -2,12 +2,14 @@ package me.kavin.piped.utils;
import me.kavin.piped.consts.Constants; import me.kavin.piped.consts.Constants;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.schabi.newpipe.extractor.Image;
import java.net.MalformedURLException; import java.net.MalformedURLException;
import java.net.URL; import java.net.URL;
import java.net.URLDecoder; import java.net.URLDecoder;
import java.net.URLEncoder; import java.net.URLEncoder;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.List;
public class URLUtils { public class URLUtils {
@ -37,6 +39,10 @@ public class URLUtils {
return rewriteURL(old, Constants.IMAGE_PROXY_PART); return rewriteURL(old, Constants.IMAGE_PROXY_PART);
} }
public static String getLastThumbnail(final List<Image> thumbnails) {
return thumbnails.isEmpty() ? null : rewriteURL(thumbnails.getLast().getUrl());
}
public static String rewriteVideoURL(final String old) { public static String rewriteVideoURL(final String old) {
return rewriteURL(old, Constants.PROXY_PART); return rewriteURL(old, Constants.PROXY_PART);
} }
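A hypothetical call site for getLastThumbnail, assuming the NewPipeExtractor item exposes getThumbnails() as a List<Image> with the largest variant last:

import org.schabi.newpipe.extractor.stream.StreamInfoItem;

final class ThumbnailSketch {
    // Rewrites the last (assumed highest-resolution) thumbnail through the image
    // proxy; returns null when the extractor reports no thumbnails at all.
    static String proxiedThumbnail(StreamInfoItem item) {
        return URLUtils.getLastThumbnail(item.getThumbnails());
    }
}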

View File

@ -6,6 +6,7 @@ import me.kavin.piped.consts.Constants;
import me.kavin.piped.utils.obj.db.Video; import me.kavin.piped.utils.obj.db.Video;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.hibernate.StatelessSession; import org.hibernate.StatelessSession;
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeStreamExtractor;
import org.schabi.newpipe.extractor.stream.StreamExtractor; import org.schabi.newpipe.extractor.stream.StreamExtractor;
import org.schabi.newpipe.extractor.stream.StreamInfo; import org.schabi.newpipe.extractor.stream.StreamInfo;
import org.schabi.newpipe.extractor.stream.StreamInfoItem; import org.schabi.newpipe.extractor.stream.StreamInfoItem;
@ -14,6 +15,7 @@ import java.util.Optional;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import static java.nio.charset.StandardCharsets.UTF_8; import static java.nio.charset.StandardCharsets.UTF_8;
import static me.kavin.piped.consts.Constants.YOUTUBE_SERVICE;
import static org.schabi.newpipe.extractor.NewPipe.getPreferredContentCountry; import static org.schabi.newpipe.extractor.NewPipe.getPreferredContentCountry;
import static org.schabi.newpipe.extractor.NewPipe.getPreferredLocalization; import static org.schabi.newpipe.extractor.NewPipe.getPreferredLocalization;
import static org.schabi.newpipe.extractor.services.youtube.YoutubeParsingHelper.getJsonPostResponse; import static org.schabi.newpipe.extractor.services.youtube.YoutubeParsingHelper.getJsonPostResponse;
@ -22,7 +24,9 @@ import static org.schabi.newpipe.extractor.services.youtube.YoutubeParsingHelper
public class VideoHelpers { public class VideoHelpers {
public static void handleNewVideo(String url, long time, me.kavin.piped.utils.obj.db.Channel channel) { public static void handleNewVideo(String url, long time, me.kavin.piped.utils.obj.db.Channel channel) {
try { try {
handleNewVideo(StreamInfo.getInfo(url), time, channel); var extractor = YOUTUBE_SERVICE.getStreamExtractor(url);
extractor.fetchPage();
handleNewVideo(extractor, time, channel);
} catch (Exception e) { } catch (Exception e) {
ExceptionHandler.handle(e); ExceptionHandler.handle(e);
} }
@ -46,7 +50,7 @@ public class VideoHelpers {
if (!DatabaseHelper.doesVideoExist(s, info.getId())) { if (!DatabaseHelper.doesVideoExist(s, info.getId())) {
Video video = new Video(info.getId(), info.getName(), info.getViewCount(), info.getDuration(), Video video = new Video(info.getId(), info.getName(), info.getViewCount(), info.getDuration(),
Math.max(infoTime, time), info.getThumbnailUrl(), info.isShortFormContent(), channel); Math.max(infoTime, time), info.getThumbnails().getLast().getUrl(), info.isShortFormContent(), channel);
insertVideo(video); insertVideo(video);
return; return;
@ -77,7 +81,7 @@ public class VideoHelpers {
boolean isShort = extractor.isShortFormContent() || isShort(extractor.getId()); boolean isShort = extractor.isShortFormContent() || isShort(extractor.getId());
Video video = new Video(extractor.getId(), extractor.getName(), extractor.getViewCount(), extractor.getLength(), Video video = new Video(extractor.getId(), extractor.getName(), extractor.getViewCount(), extractor.getLength(),
Math.max(infoTime, time), extractor.getThumbnailUrl(), isShort, channel); Math.max(infoTime, time), extractor.getThumbnails().getLast().getUrl(), isShort, channel);
insertVideo(video); insertVideo(video);
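A compact sketch of the new path, assuming YOUTUBE_SERVICE is the NewPipeExtractor YouTube service: only the stream page is fetched, rather than resolving a full StreamInfo (which would also parse every stream format) for each incoming video:

// Hypothetical mirror of the change above; the extractor already exposes the
// metadata needed to build a Video row.
static void onNewVideo(String url, long time, me.kavin.piped.utils.obj.db.Channel channel) throws Exception {
    var extractor = YOUTUBE_SERVICE.getStreamExtractor(url);
    extractor.fetchPage();
    handleNewVideo(extractor, time, channel);
}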

View File

@ -129,7 +129,7 @@ public class SyncRunner implements Runnable {
var type = event.get("type").asText(); var type = event.get("type").asText();
var content = event.at("/content/content"); var content = event.at("/content/content");
if (type.startsWith("video.piped.stream.bypass.")) { if (!UNAUTHENTICATED && type.startsWith("video.piped.stream.bypass.")) {
switch (type) { switch (type) {
case "video.piped.stream.bypass.request" -> { case "video.piped.stream.bypass.request" -> {
FederatedGeoBypassRequest bypassRequest = mapper.treeToValue(content, FederatedGeoBypassRequest.class); FederatedGeoBypassRequest bypassRequest = mapper.treeToValue(content, FederatedGeoBypassRequest.class);

View File

@ -4,10 +4,10 @@ public class Comment {
public String author, thumbnail, commentId, commentText, commentedTime, commentorUrl, repliesPage; public String author, thumbnail, commentId, commentText, commentedTime, commentorUrl, repliesPage;
public int likeCount, replyCount; public int likeCount, replyCount;
public boolean hearted, pinned, verified; public boolean hearted, pinned, verified, creatorReplied;
public Comment(String author, String thumbnail, String commentId, String commentText, String commentedTime, public Comment(String author, String thumbnail, String commentId, String commentText, String commentedTime,
String commentorUrl, String repliesPage, int likeCount, int replyCount, boolean hearted, boolean pinned, boolean verified) { String commentorUrl, String repliesPage, int likeCount, int replyCount, boolean hearted, boolean pinned, boolean verified, boolean creatorReplied) {
this.author = author; this.author = author;
this.thumbnail = thumbnail; this.thumbnail = thumbnail;
this.commentId = commentId; this.commentId = commentId;
@ -20,5 +20,6 @@ public class Comment {
this.hearted = hearted; this.hearted = hearted;
this.pinned = pinned; this.pinned = pinned;
this.verified = verified; this.verified = verified;
this.creatorReplied = creatorReplied;
} }
} }
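A hypothetical construction call showing where the new creatorReplied flag lands, appended after verified as the final constructor argument:

// All values are placeholders; only the argument order matters here.
Comment comment = new Comment("author", "thumbnailUrl", "commentId", "comment text",
        "2 days ago", "/channel/UC-placeholder", "repliesPageToken",
        42, 3, /* hearted */ false, /* pinned */ false,
        /* verified */ true, /* creatorReplied */ true);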

View File

@ -8,9 +8,10 @@ import java.util.Set;
import java.util.UUID; import java.util.UUID;
@Entity @Entity
@Table(name = "users", indexes = {@Index(columnList = "id", name = "users_id_idx"), @Table(name = "users", indexes = {
@Index(columnList = "username", name = "username_idx"), @Index(columnList = "username", name = "username_idx"),
@Index(columnList = "session_id", name = "users_session_id_idx")}) @Index(columnList = "session_id", name = "users_session_id_idx")
})
public class User implements Serializable { public class User implements Serializable {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;

View File

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
https://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-latest.xsd">
<include file="version/0-init.xml" relativeToChangelogFile="true"/>
<include file="version/1-fix-subs.xml" relativeToChangelogFile="true"/>
</databaseChangeLog>

View File

@ -0,0 +1,2 @@
CREATE EXTENSION pgcrypto;
--rollback DROP EXTENSION IF EXISTS pgcrypto;

View File

@ -0,0 +1,47 @@
CREATE INDEX IF NOT EXISTS users_session_id_idx ON users (session_id ASC) STORING (password, username);
--rollback DROP INDEX IF EXISTS users_session_id_idx;
CREATE TABLE IF NOT EXISTS videos (
id VARCHAR(11) NOT NULL UNIQUE,
duration INT8 NULL,
thumbnail VARCHAR(400) NULL,
title VARCHAR(120) NULL,
uploaded INT8 NULL,
views INT8 NULL,
uploader_id VARCHAR(24) NOT NULL,
is_short BOOL NOT NULL DEFAULT false,
CONSTRAINT videos_pkey PRIMARY KEY (id ASC, uploader_id ASC) USING HASH,
CONSTRAINT fk_videos_channels_uploader_id FOREIGN KEY (uploader_id) REFERENCES channels(uploader_id),
INDEX videos_id_idx (id ASC),
INDEX video_uploaded_idx (uploaded ASC) USING HASH,
INDEX video_uploader_id_idx (uploader_id ASC) STORING (duration, thumbnail, title, uploaded, views, is_short),
UNIQUE INDEX videos_id_key (id ASC) STORING (duration, thumbnail, title, uploaded, views, is_short)
);
--rollback DROP TABLE IF EXISTS videos;
CREATE TABLE IF NOT EXISTS users_subscribed (
subscriber INT8 NOT NULL,
channel VARCHAR(24) NOT NULL,
CONSTRAINT users_subscribed_pkey PRIMARY KEY (subscriber ASC, channel ASC) USING HASH,
CONSTRAINT fk_subscriber_users FOREIGN KEY (subscriber) REFERENCES users(id),
INDEX users_subscribed_subscriber_idx (subscriber ASC),
INDEX users_subscribed_channel_idx (channel ASC)
);
--rollback DROP TABLE IF EXISTS users_subscribed;
CREATE INDEX IF NOT EXISTS pubsub_subbed_at_idx ON pubsub (subbed_at ASC) USING HASH;
--rollback DROP INDEX IF EXISTS pubsub_subbed_at_idx;
CREATE INDEX IF NOT EXISTS playlists_playlist_id_idx ON playlists (playlist_id ASC) STORING (name, short_description, thumbnail, owner);
CREATE INDEX IF NOT EXISTS playlists_owner_idx ON playlists (owner ASC) STORING (name, short_description, thumbnail, playlist_id);
--rollback DROP INDEX IF EXISTS playlists_playlist_id_idx;
--rollback DROP INDEX IF EXISTS playlists_owner_idx;
CREATE INDEX IF NOT EXISTS unauthenticated_subscriptions_id_idx ON unauthenticated_subscriptions (id ASC) USING HASH STORING (subscribed_at);
--rollback DROP INDEX IF EXISTS unauthenticated_subscriptions_id_idx;

View File

@ -0,0 +1,48 @@
CREATE INDEX IF NOT EXISTS users_session_id_idx ON users (session_id ASC);
--rollback DROP INDEX IF EXISTS users_session_id_idx;
CREATE TABLE IF NOT EXISTS videos (
id VARCHAR(11) NOT NULL UNIQUE,
duration INT8 NULL,
thumbnail VARCHAR(400) NULL,
title VARCHAR(120) NULL,
uploaded INT8 NULL,
views INT8 NULL,
uploader_id VARCHAR(24) NOT NULL,
is_short BOOL NOT NULL DEFAULT false,
CONSTRAINT videos_pkey PRIMARY KEY (id, uploader_id),
CONSTRAINT fk_videos_channels_uploader_id FOREIGN KEY (uploader_id) REFERENCES channels(uploader_id)
);
CREATE UNIQUE INDEX IF NOT EXISTS videos_id_idx ON videos (id ASC);
CREATE INDEX IF NOT EXISTS video_uploaded_idx ON videos (uploaded ASC);
CREATE INDEX IF NOT EXISTS video_uploader_id_idx ON videos (uploader_id ASC);
--rollback DROP TABLE IF EXISTS videos;
CREATE TABLE IF NOT EXISTS users_subscribed (
subscriber INT8 NOT NULL,
channel VARCHAR(24) NOT NULL,
CONSTRAINT users_subscribed_pkey PRIMARY KEY (subscriber, channel),
CONSTRAINT fk_subscriber_users FOREIGN KEY (subscriber) REFERENCES users(id)
);
CREATE INDEX IF NOT EXISTS users_subscribed_subscriber_idx ON users_subscribed (subscriber ASC);
CREATE INDEX IF NOT EXISTS users_subscribed_channel_idx ON users_subscribed (channel ASC);
--rollback DROP TABLE IF EXISTS users_subscribed;
CREATE INDEX IF NOT EXISTS pubsub_subbed_at_idx ON pubsub (subbed_at ASC);
--rollback DROP INDEX IF EXISTS pubsub_subbed_at_idx;
CREATE INDEX IF NOT EXISTS playlists_playlist_id_idx ON playlists (playlist_id ASC);
CREATE INDEX IF NOT EXISTS playlists_owner_idx ON playlists (owner ASC);
--rollback DROP INDEX IF EXISTS playlists_playlist_id_idx;
--rollback DROP INDEX IF EXISTS playlists_owner_idx;
CREATE INDEX IF NOT EXISTS unauthenticated_subscriptions_id_idx ON unauthenticated_subscriptions (id ASC);
--rollback DROP INDEX IF EXISTS unauthenticated_subscriptions_id_idx;

View File

@ -0,0 +1,87 @@
CREATE TABLE IF NOT EXISTS users (
id BIGSERIAL NOT NULL,
password TEXT NULL,
session_id VARCHAR(36) NULL,
username VARCHAR(24) NULL UNIQUE,
CONSTRAINT users_pkey PRIMARY KEY (id)
);
DROP INDEX IF EXISTS users_id_idx;
CREATE INDEX IF NOT EXISTS username_idx ON users (username ASC);
--rollback DROP TABLE IF EXISTS users;
CREATE TABLE IF NOT EXISTS channels (
uploader_id VARCHAR(24) NOT NULL,
uploader VARCHAR(100) NULL,
uploader_avatar VARCHAR(150) NULL,
verified BOOL NULL,
CONSTRAINT channels_pkey PRIMARY KEY (uploader_id)
);
CREATE INDEX IF NOT EXISTS channels_uploader_idx ON channels (uploader ASC);
--rollback DROP TABLE IF EXISTS channels;
CREATE TABLE IF NOT EXISTS pubsub (
id VARCHAR(24) NOT NULL,
subbed_at INT8 NULL,
CONSTRAINT pubsub_pkey PRIMARY KEY (id)
);
CREATE INDEX IF NOT EXISTS pubsub_id_idx ON pubsub (id ASC);
--rollback DROP TABLE IF EXISTS pubsub;
CREATE TABLE IF NOT EXISTS playlists (
id BIGSERIAL NOT NULL,
name VARCHAR(200) NULL,
playlist_id UUID NOT NULL UNIQUE DEFAULT gen_random_uuid(),
short_description VARCHAR(100) NULL,
thumbnail VARCHAR(300) NULL,
owner INT8 NOT NULL,
CONSTRAINT playlists_pkey PRIMARY KEY (id),
CONSTRAINT fk_playlists_owner FOREIGN KEY (owner) REFERENCES users(id)
);
--rollback DROP TABLE IF EXISTS playlists;
CREATE TABLE IF NOT EXISTS playlist_videos (
id VARCHAR(11) NOT NULL,
duration INT8 NULL,
thumbnail VARCHAR(400) NULL,
title VARCHAR(120) NULL,
uploader_id VARCHAR(24) NOT NULL,
CONSTRAINT playlist_videos_pkey PRIMARY KEY (id),
CONSTRAINT fk_playlist_video_uploader_id FOREIGN KEY (uploader_id) REFERENCES channels(uploader_id)
);
CREATE INDEX IF NOT EXISTS playlist_videos_id_idx ON playlist_videos (id ASC);
CREATE INDEX IF NOT EXISTS playlist_videos_uploader_id_idx ON playlist_videos (uploader_id ASC);
--rollback DROP TABLE IF EXISTS playlist_videos;
CREATE TABLE IF NOT EXISTS playlists_videos_ids (
playlist_id INT8 NOT NULL,
videos_id VARCHAR(11) NOT NULL,
videos_order INT4 NOT NULL,
CONSTRAINT playlists_videos_ids_pkey PRIMARY KEY (playlist_id, videos_order),
CONSTRAINT fk_playlists_videos_video_id_playlist_video FOREIGN KEY (videos_id) REFERENCES playlist_videos(id),
CONSTRAINT fk_playlists_videos_playlist_id_playlist FOREIGN KEY (playlist_id) REFERENCES playlists(id)
);
CREATE INDEX IF NOT EXISTS playlists_videos_ids_playlist_id_idx ON playlists_videos_ids (playlist_id ASC);
--rollback DROP TABLE IF EXISTS playlists_videos_ids;
CREATE TABLE IF NOT EXISTS unauthenticated_subscriptions (
id VARCHAR(24) NOT NULL,
subscribed_at INT8 NOT NULL,
CONSTRAINT unauthenticated_subscriptions_pkey PRIMARY KEY (id),
CONSTRAINT fk_unauthenticated_subscriptions_id_channels FOREIGN KEY (id) REFERENCES channels(uploader_id)
);
CREATE INDEX IF NOT EXISTS unauthenticated_subscriptions_subscribed_at_idx ON unauthenticated_subscriptions (subscribed_at ASC);
--rollback DROP TABLE IF EXISTS unauthenticated_subscriptions;

View File

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
https://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-latest.xsd">
<changeSet id="0-0" author="kavin" runInTransaction="false">
<sqlFile path="0-0-init-yb.sql" dbms="yugabytedb" relativeToChangelogFile="true"/>
</changeSet>
<changeSet id="0-1" author="kavin" runInTransaction="false">
<sqlFile path="0-1-init.sql" relativeToChangelogFile="true"/>
<sqlFile path="0-1-init-crdb.sql" dbms="cockroachdb" relativeToChangelogFile="true"/>
<sqlFile path="0-1-init-pg.sql" dbms="postgresql,yugabytedb" relativeToChangelogFile="true"/>
</changeSet>
</databaseChangeLog>

View File

@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
https://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-latest.xsd">
<changeSet id="1-0" author="kavin" runInTransaction="false">
<!-- drop constraint since it prevents unauthenticated subscriptions from working -->
<sql>ALTER TABLE unauthenticated_subscriptions DROP CONSTRAINT IF EXISTS fk_unauthenticated_subscriptions_id_channels;</sql>
<rollback>
<sql>ALTER TABLE unauthenticated_subscriptions ADD CONSTRAINT fk_unauthenticated_subscriptions_id_channels FOREIGN KEY (id) REFERENCES channels(uploader_id);</sql>
</rollback>
</changeSet>
</databaseChangeLog>

View File

@ -4,11 +4,11 @@
"http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd"> "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<hibernate-configuration> <hibernate-configuration>
<session-factory> <session-factory>
<property name="hibernate.hbm2ddl.auto">update</property> <property name="hibernate.hbm2ddl.auto">validate</property>
<!-- Optional: Show SQL output for debugging --> <!-- Optional: Show SQL output for debugging -->
<property name="hibernate.show_sql">false</property> <property name="hibernate.show_sql">false</property>
<property name="hibernate.format_sql">true</property> <property name="hibernate.format_sql">true</property>
<property name="hibernate.connection.provider_class">org.hibernate.hikaricp.internal.HikariCPConnectionProvider</property> <property name="hibernate.connection.provider_class">org.hibernate.hikaricp.internal.HikariCPConnectionProvider</property>
<property name="hibernate.connection.handling_mode">DELAYED_ACQUISITION_AND_RELEASE_AFTER_STATEMENT</property> <property name="hibernate.connection.handling_mode">DELAYED_ACQUISITION_AND_RELEASE_AFTER_STATEMENT</property>
<property name="hibernate.jdbc.batch_size">50</property> <property name="hibernate.jdbc.batch_size">50</property>

View File

@ -1,6 +1,6 @@
#!/bin/bash #!/bin/bash
CURLOPTS=(-i -s -S -o /dev/null -f -w "%{http_code}\tTime:\t%{time_starttransfer}\t%{url_effective}\n") CURLOPTS=(-i -s -S --max-time 60 -o /dev/null -f -w "%{http_code}\tTime:\t%{time_starttransfer}\t%{url_effective}\n")
HOST="127.0.0.1:8080" HOST="127.0.0.1:8080"
# Healthcheck Test # Healthcheck Test

View File

@ -1,18 +0,0 @@
# The port to Listen on.
PORT: 8080
# Proxy
PROXY_PART: https://pipedproxy-ams.kavin.rocks
# Public API URL
API_URL: https://pipedapi.kavin.rocks
# Public Frontend URL
FRONTEND_URL: https://piped.video
# Hibernate properties
hibernate.connection.url: jdbc:hsqldb:mem:memdb;sql.syntax_pgs=true
hibernate.connection.driver_class: org.hsqldb.jdbcDriver
hibernate.dialect: org.hibernate.dialect.HSQLDialect
hibernate.connection.username: piped
hibernate.connection.password: changeme

View File

@ -1,8 +0,0 @@
services:
piped:
image: 1337kavin/piped:latest
restart: unless-stopped
ports:
- "127.0.0.1:8080:8080"
volumes:
- ./config.hsqldb.properties:/app/config.properties