Mirror of https://github.com/TeamPiped/Piped-Backend.git (synced 2024-12-12 21:30:29 +05:30)

Commit c223dace93: Merge pull request #677 from TeamPiped/liquibase-migrations

Add liquibase to perform database migrations
.github/workflows/docker-build-test.yml (25 changed lines)

@@ -9,13 +9,16 @@ on:
  pull_request:

jobs:
  build-jdk:
    uses: ./.github/workflows/fat-build.yml

  build-test:
    runs-on: ubuntu-latest
    needs: build-jdk
    strategy:
      matrix:
        docker-compose-file:
          - docker-compose.yml
          - testing/docker-compose.hsqldb.yml
          - testing/docker-compose.cockroachdb.yml
          - testing/docker-compose.yugabytedb.yml
        dockerfile:
@@ -23,20 +26,20 @@ jobs:
          - Dockerfile.azul.ci
          - Dockerfile.openj9.ci
          - Dockerfile.graalvm-jvm.ci
        include:
          - sleep: 20
          - docker-compose-file: testing/docker-compose.cockroachdb.yml
            sleep: 30
          - docker-compose-file: testing/docker-compose.yugabytedb.yml
            sleep: 120
      fail-fast: false
    steps:
      - uses: actions/checkout@v3
      - uses: actions/download-artifact@v3
        with:
          name: piped.jar
      - name: Create Version File
        run: echo $(git log -1 --date=short --pretty=format:%cd)-$(git rev-parse --short HEAD) > VERSION
      - name: set up JDK 17
        uses: actions/setup-java@v3
        with:
          java-version: 17
          distribution: temurin
          cache: "gradle"
      - name: Run Build
        run: ./gradlew shadowJar
      - run: mv build/libs/piped-*-all.jar piped.jar
      - name: Build Image Locally
        uses: docker/build-push-action@v4
        with:
@@ -45,7 +48,7 @@ jobs:
          file: ${{ matrix.dockerfile }}
          tags: 1337kavin/piped:latest
      - name: Start Docker-Compose services
        run: docker-compose -f ${{ matrix.docker-compose-file }} up -d && sleep 20
        run: docker-compose -f ${{ matrix.docker-compose-file }} up -d && sleep ${{ matrix.sleep }}
      - name: Run tests
        run: ./testing/api-test.sh
      - name: Collect services logs
.github/workflows/docker-build.yml (16 changed lines)

@@ -8,7 +8,11 @@ on:
      - master

jobs:
  build-jdk:
    uses: ./.github/workflows/fat-build.yml

  build-docker:
    needs: build-jdk
    runs-on: ubuntu-latest
    strategy:
      matrix:
@@ -23,17 +27,11 @@ jobs:
            dockerfile: ./Dockerfile.graalvm-jvm.ci
    steps:
      - uses: actions/checkout@v3
      - uses: actions/download-artifact@v3
        with:
          name: piped.jar
      - name: Create Version File
        run: echo $(git log -1 --date=short --pretty=format:%cd)-$(git rev-parse --short HEAD) > VERSION
      - name: set up JDK 17
        uses: actions/setup-java@v3
        with:
          java-version: 17
          distribution: temurin
          cache: "gradle"
      - name: Run Build
        run: ./gradlew shadowJar
      - run: mv build/libs/piped-*-all.jar piped.jar
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
        with:
.github/workflows/docker-migrations-build-test.yml (new file, 82 lines)

@@ -0,0 +1,82 @@
name: Docker-Compose Build and Test Migration

on:
  pull_request:
    paths:
      - "src/main/resources/changelog/**"
      - "src/main/java/me/kavin/piped/utils/obj/db/**"

jobs:
  build-new:
    uses: ./.github/workflows/fat-build.yml
  build-old:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.base.sha }}
      - name: set up JDK 17
        uses: actions/setup-java@v3
        with:
          java-version: 17
          distribution: temurin
          cache: "gradle"
      - name: Run Build
        run: ./gradlew shadowJar
      - run: mv build/libs/piped-*-all.jar piped.jar
      - uses: actions/upload-artifact@v3
        with:
          name: piped-old.jar
          path: piped.jar

  docker-build-test:
    needs: [ build-new, build-old ]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        docker-compose-file:
          - docker-compose.yml
          - testing/docker-compose.cockroachdb.yml
          - testing/docker-compose.yugabytedb.yml
        dockerfile:
          - Dockerfile.ci
        include:
          - sleep: 20
          - docker-compose-file: testing/docker-compose.cockroachdb.yml
            sleep: 30
          - docker-compose-file: testing/docker-compose.yugabytedb.yml
            sleep: 120
      fail-fast: false
    steps:
      - uses: actions/checkout@v3
      - run: echo "unknown" > VERSION
      - uses: actions/download-artifact@v3
        with:
          name: piped-old.jar
      - name: Build Old Image Locally
        uses: docker/build-push-action@v4
        with:
          context: .
          load: true
          file: ${{ matrix.dockerfile }}
          tags: 1337kavin/piped:latest
      - name: Start Docker-Compose services
        run: docker-compose -f ${{ matrix.docker-compose-file }} up -d && sleep ${{ matrix.sleep }}
      - run: rm piped.jar
      - uses: actions/download-artifact@v3
        with:
          name: piped.jar
      - name: Build New Image Locally
        uses: docker/build-push-action@v4
        with:
          context: .
          load: true
          file: ${{ matrix.dockerfile }}
          tags: 1337kavin/piped:latest
      - name: Start Docker-Compose services
        run: docker-compose -f ${{ matrix.docker-compose-file }} up -d && sleep ${{ matrix.sleep }}
      - name: Run tests
        run: ./testing/api-test.sh
      - name: Collect services logs
        if: failure()
        run: docker-compose -f ${{ matrix.docker-compose-file }} logs
.github/workflows/fat-build.yml (new file, 24 lines)

@@ -0,0 +1,24 @@
name: Fat JAR Build

on:
  workflow_call:

jobs:
  build-and-test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: set up JDK 17
        uses: actions/setup-java@v3
        with:
          java-version: 17
          distribution: temurin
          cache: "gradle"
      - name: Run Build
        run: ./gradlew shadowJar
      - run: mv build/libs/piped-*-all.jar piped.jar
      - uses: actions/upload-artifact@v3
        with:
          name: piped.jar
          path: piped.jar
build.gradle

@@ -29,10 +29,11 @@ dependencies {
    implementation 'io.activej:activej-boot:5.5'
    implementation 'io.activej:activej-specializer:5.5'
    implementation 'io.activej:activej-launchers-http:5.5'
    implementation 'org.hsqldb:hsqldb:2.7.2'
    implementation 'org.postgresql:postgresql:42.6.0'
    implementation 'org.hibernate:hibernate-core:6.2.7.Final'
    implementation 'org.hibernate:hibernate-hikaricp:6.2.7.Final'
    implementation 'org.liquibase:liquibase-core:4.23.1'
    implementation('org.liquibase.ext:liquibase-yugabytedb:4.23.0') { exclude group: 'org.liquibase' }
    implementation 'com.zaxxer:HikariCP:5.0.1'
    implementation 'org.springframework.security:spring-security-crypto:6.1.2'
    implementation 'commons-logging:commons-logging:1.2'
Main.java

@@ -20,7 +20,6 @@ import org.schabi.newpipe.extractor.localization.ContentCountry;
import org.schabi.newpipe.extractor.localization.Localization;
import org.schabi.newpipe.extractor.services.youtube.YoutubeThrottlingDecrypter;
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeStreamExtractor;
import rocks.kavin.reqwest4j.ReqwestUtils;

import java.util.*;
import java.util.concurrent.CompletableFuture;
@@ -47,6 +46,13 @@ public class Main {

        Injector.useSpecializer();

        try {
            LiquibaseHelper.init();
        } catch (Exception e) {
            ExceptionHandler.handle(e);
            System.exit(1);
        }

        Multithreading.runAsync(() -> new Thread(new SyncRunner(
                new OkHttpClient.Builder().readTimeout(60, TimeUnit.SECONDS).build(),
                MATRIX_SERVER,
src/main/java/me/kavin/piped/utils/LiquibaseHelper.java (new file, 50 lines)

@@ -0,0 +1,50 @@
package me.kavin.piped.utils;

import liquibase.Liquibase;
import liquibase.Scope;
import liquibase.command.CommandScope;
import liquibase.command.core.UpdateCommandStep;
import liquibase.command.core.helpers.DbUrlConnectionCommandStep;
import liquibase.database.Database;
import liquibase.database.DatabaseFactory;
import liquibase.database.jvm.JdbcConnection;
import liquibase.resource.ClassLoaderResourceAccessor;
import me.kavin.piped.consts.Constants;

import java.sql.DriverManager;
import java.util.HashMap;
import java.util.Map;

public class LiquibaseHelper {

    public static void init() throws Exception {

        String url = Constants.hibernateProperties.get("hibernate.connection.url");
        String username = Constants.hibernateProperties.get("hibernate.connection.username");
        String password = Constants.hibernateProperties.get("hibernate.connection.password");

        // ensure postgres driver is loaded
        DriverManager.registerDriver(new org.postgresql.Driver());

        // register YugabyteDB database
        DatabaseFactory.getInstance().register(new liquibase.ext.yugabytedb.database.YugabyteDBDatabase());

        Database database = DatabaseFactory.getInstance().findCorrectDatabaseImplementation(new JdbcConnection(DriverManager.getConnection(url, username, password)));

        try (Liquibase liquibase = new Liquibase("changelog/db.changelog-master.xml", new ClassLoaderResourceAccessor(), database)) {

            Map<String, Object> scopeObjects = new HashMap<>();
            scopeObjects.put(Scope.Attr.database.name(), liquibase.getDatabase());
            scopeObjects.put(Scope.Attr.resourceAccessor.name(), liquibase.getResourceAccessor());

            Scope.child(scopeObjects, () -> {
                CommandScope updateCommand = new CommandScope(UpdateCommandStep.COMMAND_NAME);
                updateCommand.addArgumentValue(DbUrlConnectionCommandStep.DATABASE_ARG, liquibase.getDatabase());
                updateCommand.addArgumentValue(UpdateCommandStep.CHANGELOG_FILE_ARG, liquibase.getChangeLogFile());
                updateCommand.execute();
            });
        }
    }
}
User.java

@@ -8,9 +8,10 @@ import java.util.Set;
import java.util.UUID;

@Entity
@Table(name = "users", indexes = {@Index(columnList = "id", name = "users_id_idx"),
@Table(name = "users", indexes = {
        @Index(columnList = "username", name = "username_idx"),
        @Index(columnList = "session_id", name = "users_session_id_idx")})
        @Index(columnList = "session_id", name = "users_session_id_idx")
})
public class User implements Serializable {

    private static final long serialVersionUID = 1L;
src/main/resources/changelog/db.changelog-master.xml (new file, 9 lines)

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
                   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
                   xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
                   https://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-latest.xsd">

    <include file="version/0-init.xml" relativeToChangelogFile="true"/>

</databaseChangeLog>
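A note on how this changelog grows (illustrative only, not part of this commit): each later migration gets its own file under version/ and is wired in by appending another <include> to this master changelog. A minimal sketch, where version/1-example.xml is a hypothetical file name:

    <!-- hypothetical follow-up migration; version/1-example.xml does not exist in this PR -->
    <include file="version/0-init.xml" relativeToChangelogFile="true"/>
    <include file="version/1-example.xml" relativeToChangelogFile="true"/>

Liquibase records every executed changeset in its DATABASECHANGELOG table, so re-running the master changelog only applies entries that have not run yet.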
src/main/resources/changelog/version/0-0-init-yb.sql (new file, 2 lines)

@@ -0,0 +1,2 @@
CREATE EXTENSION pgcrypto;
--rollback DROP EXTENSION IF EXISTS pgcrypto;
src/main/resources/changelog/version/0-1-init-crdb.sql (new file, 47 lines)

@@ -0,0 +1,47 @@
CREATE INDEX IF NOT EXISTS users_session_id_idx ON users (session_id ASC) STORING (password, username);

--rollback DROP INDEX IF EXISTS users_session_id_idx;

CREATE TABLE IF NOT EXISTS videos (
    id VARCHAR(11) NOT NULL UNIQUE,
    duration INT8 NULL,
    thumbnail VARCHAR(400) NULL,
    title VARCHAR(120) NULL,
    uploaded INT8 NULL,
    views INT8 NULL,
    uploader_id VARCHAR(24) NOT NULL,
    is_short BOOL NOT NULL DEFAULT false,
    CONSTRAINT videos_pkey PRIMARY KEY (id ASC, uploader_id ASC) USING HASH,
    CONSTRAINT fk_videos_channels_uploader_id FOREIGN KEY (uploader_id) REFERENCES channels(uploader_id),
    INDEX videos_id_idx (id ASC),
    INDEX video_uploaded_idx (uploaded ASC) USING HASH,
    INDEX video_uploader_id_idx (uploader_id ASC) STORING (duration, thumbnail, title, uploaded, views, is_short),
    UNIQUE INDEX videos_id_key (id ASC) STORING (duration, thumbnail, title, uploaded, views, is_short)
);

--rollback DROP TABLE IF EXISTS videos;

CREATE TABLE IF NOT EXISTS users_subscribed (
    subscriber INT8 NOT NULL,
    channel VARCHAR(24) NOT NULL,
    CONSTRAINT users_subscribed_pkey PRIMARY KEY (subscriber ASC, channel ASC) USING HASH,
    CONSTRAINT fk_subscriber_users FOREIGN KEY (subscriber) REFERENCES users(id),
    INDEX users_subscribed_subscriber_idx (subscriber ASC),
    INDEX users_subscribed_channel_idx (channel ASC)
);

--rollback DROP TABLE IF EXISTS users_subscribed;

CREATE INDEX IF NOT EXISTS pubsub_subbed_at_idx ON pubsub (subbed_at ASC) USING HASH;

--rollback DROP INDEX IF EXISTS pubsub_subbed_at_idx;

CREATE INDEX IF NOT EXISTS playlists_playlist_id_idx ON playlists (playlist_id ASC) STORING (name, short_description, thumbnail, owner);
CREATE INDEX IF NOT EXISTS playlists_owner_idx ON playlists (owner ASC) STORING (name, short_description, thumbnail, playlist_id);

--rollback DROP INDEX IF EXISTS playlists_playlist_id_idx;
--rollback DROP INDEX IF EXISTS playlists_owner_idx;

CREATE INDEX IF NOT EXISTS unauthenticated_subscriptions_id_idx ON unauthenticated_subscriptions (id ASC) USING HASH STORING (subscribed_at);

--rollback DROP INDEX IF EXISTS unauthenticated_subscriptions_id_idx;
src/main/resources/changelog/version/0-1-init-pg.sql (new file, 48 lines)

@@ -0,0 +1,48 @@
CREATE INDEX IF NOT EXISTS users_session_id_idx ON users (session_id ASC);

--rollback DROP INDEX IF EXISTS users_session_id_idx;

CREATE TABLE IF NOT EXISTS videos (
    id VARCHAR(11) NOT NULL UNIQUE,
    duration INT8 NULL,
    thumbnail VARCHAR(400) NULL,
    title VARCHAR(120) NULL,
    uploaded INT8 NULL,
    views INT8 NULL,
    uploader_id VARCHAR(24) NOT NULL,
    is_short BOOL NOT NULL DEFAULT false,
    CONSTRAINT videos_pkey PRIMARY KEY (id, uploader_id),
    CONSTRAINT fk_videos_channels_uploader_id FOREIGN KEY (uploader_id) REFERENCES channels(uploader_id)
);

CREATE UNIQUE INDEX IF NOT EXISTS videos_id_idx ON videos (id ASC);
CREATE INDEX IF NOT EXISTS video_uploaded_idx ON videos (uploaded ASC);
CREATE INDEX IF NOT EXISTS video_uploader_id_idx ON videos (uploader_id ASC);

--rollback DROP TABLE IF EXISTS videos;

CREATE TABLE IF NOT EXISTS users_subscribed (
    subscriber INT8 NOT NULL,
    channel VARCHAR(24) NOT NULL,
    CONSTRAINT users_subscribed_pkey PRIMARY KEY (subscriber, channel),
    CONSTRAINT fk_subscriber_users FOREIGN KEY (subscriber) REFERENCES users(id)
);

CREATE INDEX IF NOT EXISTS users_subscribed_subscriber_idx ON users_subscribed (subscriber ASC);
CREATE INDEX IF NOT EXISTS users_subscribed_channel_idx ON users_subscribed (channel ASC);

--rollback DROP TABLE IF EXISTS users_subscribed;

CREATE INDEX IF NOT EXISTS pubsub_subbed_at_idx ON pubsub (subbed_at ASC);

--rollback DROP INDEX IF EXISTS pubsub_subbed_at_idx;

CREATE INDEX IF NOT EXISTS playlists_playlist_id_idx ON playlists (playlist_id ASC);
CREATE INDEX IF NOT EXISTS playlists_owner_idx ON playlists (owner ASC);

--rollback DROP INDEX IF EXISTS playlists_playlist_id_idx;
--rollback DROP INDEX IF EXISTS playlists_owner_idx;

CREATE INDEX IF NOT EXISTS unauthenticated_subscriptions_id_idx ON unauthenticated_subscriptions (id ASC);

--rollback DROP INDEX IF EXISTS unauthenticated_subscriptions_id_idx;
src/main/resources/changelog/version/0-1-init.sql (new file, 87 lines)

@@ -0,0 +1,87 @@
CREATE TABLE IF NOT EXISTS users (
    id BIGSERIAL NOT NULL,
    password TEXT NULL,
    session_id VARCHAR(36) NULL,
    username VARCHAR(24) NULL UNIQUE,
    CONSTRAINT users_pkey PRIMARY KEY (id)
);

DROP INDEX IF EXISTS users_id_idx;

CREATE INDEX IF NOT EXISTS username_idx ON users (username ASC);

--rollback DROP TABLE IF EXISTS users;

CREATE TABLE IF NOT EXISTS channels (
    uploader_id VARCHAR(24) NOT NULL,
    uploader VARCHAR(100) NULL,
    uploader_avatar VARCHAR(150) NULL,
    verified BOOL NULL,
    CONSTRAINT channels_pkey PRIMARY KEY (uploader_id)
);

CREATE INDEX IF NOT EXISTS channels_uploader_idx ON channels (uploader ASC);

--rollback DROP TABLE IF EXISTS channels;

CREATE TABLE IF NOT EXISTS pubsub (
    id VARCHAR(24) NOT NULL,
    subbed_at INT8 NULL,
    CONSTRAINT pubsub_pkey PRIMARY KEY (id)
);

CREATE INDEX IF NOT EXISTS pubsub_id_idx ON pubsub (id ASC);

--rollback DROP TABLE IF EXISTS pubsub;

CREATE TABLE IF NOT EXISTS playlists (
    id BIGSERIAL NOT NULL,
    name VARCHAR(200) NULL,
    playlist_id UUID NOT NULL UNIQUE DEFAULT gen_random_uuid(),
    short_description VARCHAR(100) NULL,
    thumbnail VARCHAR(300) NULL,
    owner INT8 NOT NULL,
    CONSTRAINT playlists_pkey PRIMARY KEY (id),
    CONSTRAINT fk_playlists_owner FOREIGN KEY (owner) REFERENCES users(id)
);

--rollback DROP TABLE IF EXISTS playlists;

CREATE TABLE IF NOT EXISTS playlist_videos (
    id VARCHAR(11) NOT NULL,
    duration INT8 NULL,
    thumbnail VARCHAR(400) NULL,
    title VARCHAR(120) NULL,
    uploader_id VARCHAR(24) NOT NULL,
    CONSTRAINT playlist_videos_pkey PRIMARY KEY (id),
    CONSTRAINT fk_playlist_video_uploader_id FOREIGN KEY (uploader_id) REFERENCES channels(uploader_id)
);

CREATE INDEX IF NOT EXISTS playlist_videos_id_idx ON playlist_videos (id ASC);
CREATE INDEX IF NOT EXISTS playlist_videos_uploader_id_idx ON playlist_videos (uploader_id ASC);

--rollback DROP TABLE IF EXISTS playlist_videos;

CREATE TABLE IF NOT EXISTS playlists_videos_ids (
    playlist_id INT8 NOT NULL,
    videos_id VARCHAR(11) NOT NULL,
    videos_order INT4 NOT NULL,
    CONSTRAINT playlists_videos_ids_pkey PRIMARY KEY (playlist_id, videos_order),
    CONSTRAINT fk_playlists_videos_video_id_playlist_video FOREIGN KEY (videos_id) REFERENCES playlist_videos(id),
    CONSTRAINT fk_playlists_videos_playlist_id_playlist FOREIGN KEY (playlist_id) REFERENCES playlists(id)
);

CREATE INDEX IF NOT EXISTS playlists_videos_ids_playlist_id_idx ON playlists_videos_ids (playlist_id ASC);

--rollback DROP TABLE IF EXISTS playlists_videos_ids;

CREATE TABLE IF NOT EXISTS unauthenticated_subscriptions (
    id VARCHAR(24) NOT NULL,
    subscribed_at INT8 NOT NULL,
    CONSTRAINT unauthenticated_subscriptions_pkey PRIMARY KEY (id),
    CONSTRAINT fk_unauthenticated_subscriptions_id_channels FOREIGN KEY (id) REFERENCES channels(uploader_id)
);

CREATE INDEX IF NOT EXISTS unauthenticated_subscriptions_subscribed_at_idx ON unauthenticated_subscriptions (subscribed_at ASC);

--rollback DROP TABLE IF EXISTS unauthenticated_subscriptions;
src/main/resources/changelog/version/0-init.xml (new file, 17 lines)

@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
                   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
                   xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
                   https://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-latest.xsd">

    <changeSet id="0-0" author="kavin" runInTransaction="false">
        <sqlFile path="0-0-init-yb.sql" dbms="yugabytedb" relativeToChangelogFile="true"/>
    </changeSet>

    <changeSet id="0-1" author="kavin" runInTransaction="false">
        <sqlFile path="0-1-init.sql" relativeToChangelogFile="true"/>
        <sqlFile path="0-1-init-crdb.sql" dbms="cockroachdb" relativeToChangelogFile="true"/>
        <sqlFile path="0-1-init-pg.sql" dbms="postgresql,yugabytedb" relativeToChangelogFile="true"/>
    </changeSet>

</databaseChangeLog>
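For comparison with 0-init.xml above, a follow-up changeset file would use the same structure: one <changeSet> per change, optionally restricted by dbms, with an explicit rollback. A minimal hypothetical sketch, assuming an illustrative file name (version/1-example.xml) and column that are not part of this PR:

<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
                   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
                   xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog
                   https://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-latest.xsd">

    <!-- hypothetical version/1-example.xml, referenced from db.changelog-master.xml -->
    <changeSet id="1-0" author="example">
        <addColumn tableName="users">
            <column name="created_at" type="bigint"/>
        </addColumn>
        <rollback>
            <dropColumn tableName="users" columnName="created_at"/>
        </rollback>
    </changeSet>

</databaseChangeLog>

With hibernate.hbm2ddl.auto switched to validate (next hunk), Hibernate no longer alters the schema itself; it only checks that the mapped entities match the schema these changesets produce.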
hibernate.cfg.xml

@@ -4,11 +4,11 @@
    "http://www.hibernate.org/dtd/hibernate-configuration-3.0.dtd">
<hibernate-configuration>
    <session-factory>
        <property name="hibernate.hbm2ddl.auto">update</property>
        <property name="hibernate.hbm2ddl.auto">validate</property>
        <!-- Optional: Show SQL output for debugging -->
        <property name="hibernate.show_sql">false</property>
        <property name="hibernate.format_sql">true</property>

        <property name="hibernate.connection.provider_class">org.hibernate.hikaricp.internal.HikariCPConnectionProvider</property>
        <property name="hibernate.connection.handling_mode">DELAYED_ACQUISITION_AND_RELEASE_AFTER_STATEMENT</property>
        <property name="hibernate.jdbc.batch_size">50</property>
testing/api-test.sh

@@ -1,6 +1,6 @@
#!/bin/bash

CURLOPTS=(-i -s -S -o /dev/null -f -w "%{http_code}\tTime:\t%{time_starttransfer}\t%{url_effective}\n")
CURLOPTS=(-i -s -S --max-time 60 -o /dev/null -f -w "%{http_code}\tTime:\t%{time_starttransfer}\t%{url_effective}\n")
HOST="127.0.0.1:8080"

# Healthcheck Test
Deleted file (18 lines): HSQLDB test configuration

@@ -1,18 +0,0 @@
# The port to Listen on.
PORT: 8080

# Proxy
PROXY_PART: https://pipedproxy-ams.kavin.rocks

# Public API URL
API_URL: https://pipedapi.kavin.rocks

# Public Frontend URL
FRONTEND_URL: https://piped.video

# Hibernate properties
hibernate.connection.url: jdbc:hsqldb:mem:memdb;sql.syntax_pgs=true
hibernate.connection.driver_class: org.hsqldb.jdbcDriver
hibernate.dialect: org.hibernate.dialect.HSQLDialect
hibernate.connection.username: piped
hibernate.connection.password: changeme
Deleted file (8 lines): HSQLDB docker-compose service

@@ -1,8 +0,0 @@
services:
  piped:
    image: 1337kavin/piped:latest
    restart: unless-stopped
    ports:
      - "127.0.0.1:8080:8080"
    volumes:
      - ./config.hsqldb.properties:/app/config.properties