Initial Django Commit

Author: Luke Hubmayer-Werner
Date: 2024-12-18 20:57:06 +10:30
parent c4f5748963
commit d63c39f4d5

29 changed files with 1995 additions and 0 deletions

Pipfile

@@ -7,6 +7,7 @@ name = "pypi"
 fugashi = "*"
 unidic = "*"
 pykakasi = "*"
+django = "*"

 [dev-packages]

ktsite/__init__.py (new file, empty)

ktsite/asgi.py (new file, 16 lines)

@@ -0,0 +1,16 @@
"""
ASGI config for ktsite project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/5.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ktsite.settings')
application = get_asgi_application()

ktsite/settings.py (new file, 124 lines)

@@ -0,0 +1,124 @@
"""
Django settings for ktsite project.
Generated by 'django-admin startproject' using Django 5.1.4.
For more information on this file, see
https://docs.djangoproject.com/en/5.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/5.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/5.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-p2(%ui_u8&s%#3wozb4n9hfao+ont5#az#g-5ze*i4-629bw2&'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['topcez-lhw', '127.0.0.1']
# Application definition
INSTALLED_APPS = [
'lyrics',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'ktsite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'ktsite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/5.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/5.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/5.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/5.1/howto/static-files/
STATIC_URL = 'static/'
# Default primary key field type
# https://docs.djangoproject.com/en/5.1/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'

ktsite/urls.py (new file, 23 lines)

@@ -0,0 +1,23 @@
"""
URL configuration for ktsite project.
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/5.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path
urlpatterns = [
path('lyrics/', include('lyrics.urls')),
path('admin/', admin.site.urls),
]

ktsite/wsgi.py (new file, 16 lines)

@@ -0,0 +1,16 @@
"""
WSGI config for ktsite project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/5.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ktsite.settings')
application = get_wsgi_application()

lyrics/__init__.py (new file, empty)

lyrics/admin.py (new file, 3 lines)

@@ -0,0 +1,3 @@
from django.contrib import admin
# Register your models here.

lyrics/apps.py (new file, 6 lines)

@@ -0,0 +1,6 @@
from django.apps import AppConfig
class LyricsConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'lyrics'

lyrics/models.py (new file, 3 lines)

@@ -0,0 +1,3 @@
from django.db import models
# Create your models here.

(Five binary files and three file diffs with overlong lines are omitted from this view.)
Minified JASSUB subtitle renderer, ES module build (new file, 526 lines; file path not shown in this view)

@@ -0,0 +1,526 @@
typeof HTMLVideoElement < "u" && !("requestVideoFrameCallback" in HTMLVideoElement.prototype) && "getVideoPlaybackQuality" in HTMLVideoElement.prototype && (HTMLVideoElement.prototype._rvfcpolyfillmap = {}, HTMLVideoElement.prototype.requestVideoFrameCallback = function(c) {
const e = performance.now(), t = this.getVideoPlaybackQuality(), s = this.mozPresentedFrames || this.mozPaintedFrames || t.totalVideoFrames - t.droppedVideoFrames, a = (n, r) => {
const i = this.getVideoPlaybackQuality(), o = this.mozPresentedFrames || this.mozPaintedFrames || i.totalVideoFrames - i.droppedVideoFrames;
if (o > s) {
const d = this.mozFrameDelay || i.totalFrameDelay - t.totalFrameDelay || 0, l = r - n;
c(r, {
presentationTime: r + d * 1e3,
expectedDisplayTime: r + l,
width: this.videoWidth,
height: this.videoHeight,
mediaTime: Math.max(0, this.currentTime || 0) + l / 1e3,
presentedFrames: o,
processingDuration: d
}), delete this._rvfcpolyfillmap[e];
} else
this._rvfcpolyfillmap[e] = requestAnimationFrame((d) => a(r, d));
};
return this._rvfcpolyfillmap[e] = requestAnimationFrame((n) => a(e, n)), e;
}, HTMLVideoElement.prototype.cancelVideoFrameCallback = function(c) {
cancelAnimationFrame(this._rvfcpolyfillmap[c]), delete this._rvfcpolyfillmap[c];
});
const _ = {
bt709: "BT709",
// these might not be exactly correct? oops?
bt470bg: "BT601",
// alias BT.601 PAL... whats the difference?
smpte170m: "BT601"
// alias BT.601 NTSC... whats the difference?
}, f = {
BT601: {
BT709: "1.0863 -0.0723 -0.014 0 0 0.0965 0.8451 0.0584 0 0 -0.0141 -0.0277 1.0418"
},
BT709: {
BT601: "0.9137 0.0784 0.0079 0 0 -0.1049 1.1722 -0.0671 0 0 0.0096 0.0322 0.9582"
},
FCC: {
BT709: "1.0873 -0.0736 -0.0137 0 0 0.0974 0.8494 0.0531 0 0 -0.0127 -0.0251 1.0378",
BT601: "1.001 -0.0008 -0.0002 0 0 0.0009 1.005 -0.006 0 0 0.0013 0.0027 0.996"
},
SMPTE240M: {
BT709: "0.9993 0.0006 0.0001 0 0 -0.0004 0.9812 0.0192 0 0 -0.0034 -0.0114 1.0148",
BT601: "0.913 0.0774 0.0096 0 0 -0.1051 1.1508 -0.0456 0 0 0.0063 0.0207 0.973"
}
};
class h extends EventTarget {
/**
* @param {Object} options Settings object.
* @param {HTMLVideoElement} options.video Video to use as target for rendering and event listeners. Optional if canvas is specified instead.
* @param {HTMLCanvasElement} [options.canvas=HTMLCanvasElement] Canvas to use for manual handling. Not required if video is specified.
* @param {'js'|'wasm'} [options.blendMode='js'] Which image blending mode to use. WASM will perform better on lower end devices, JS will perform better if the device and browser supports hardware acceleration.
* @param {Boolean} [options.asyncRender=true] Whether or not to use async rendering, which offloads the CPU by creating image bitmaps on the GPU.
* @param {Boolean} [options.offscreenRender=true] Whether or not to render things fully on the worker, greatly reduces CPU usage.
* @param {Boolean} [options.onDemandRender=true] Whether or not to render subtitles as the video player decodes renders, rather than predicting which frame the player is on using events.
* @param {Number} [options.targetFps=24] Target FPS to render subtitles at. Ignored when onDemandRender is enabled.
* @param {Number} [options.timeOffset=0] Subtitle time offset in seconds.
* @param {Boolean} [options.debug=false] Whether or not to print debug information.
* @param {Number} [options.prescaleFactor=1.0] Scale down (< 1.0) the subtitles canvas to improve performance at the expense of quality, or scale it up (> 1.0).
* @param {Number} [options.prescaleHeightLimit=1080] The height in pixels beyond which the subtitles canvas won't be prescaled.
* @param {Number} [options.maxRenderHeight=0] The maximum rendering height in pixels of the subtitles canvas. Beyond this subtitles will be upscaled by the browser.
* @param {Boolean} [options.dropAllAnimations=false] Attempt to discard all animated tags. Enabling this may severely mangle complex subtitles and should only be considered as a last-ditch effort of uncertain success for hardware otherwise incapable of displaying anything. Will not reliably work with manually edited or allocated events.
* @param {Boolean} [options.dropAllBlur=false] The holy grail of performance gains. If heavy TS lags a lot, disabling this will make it ~x10 faster. This drops blur from all added subtitle tracks making most text and backgrounds look sharper, this is way less intrusive than dropping all animations, while still offering major performance gains.
* @param {String} [options.workerUrl='static/jass/jassub-worker.js'] The URL of the worker.
* @param {String} [options.wasmUrl='./jassub-worker.wasm'] The URL of the worker WASM.
* @param {String} [options.legacyWasmUrl='./jassub-worker.wasm.js'] The URL of the worker WASM. Only loaded if the browser doesn't support WASM.
* @param {String} options.modernWasmUrl The URL of the modern worker WASM. This includes faster ASM instructions, but is only supported by newer browsers, disabled if the URL isn't defined.
* @param {String} [options.subUrl=options.subContent] The URL of the subtitle file to play.
* @param {String} [options.subContent=options.subUrl] The content of the subtitle file to play.
* @param {String[]|Uint8Array[]} [options.fonts] An array of links or Uint8Arrays to the fonts used in the subtitle. If Uint8Array is used the array is copied, not referenced. This forces all the fonts in this array to be loaded by the renderer, regardless of if they are used.
* @param {Object} [options.availableFonts={'droid sans japanese': './DroidSansJapanese.ttf', 'noto sans japanese': 'NotoSansJPRegular.woff', 'liberation sans': './default.woff2'}] Object with all available fonts - Key is font family in lower case, value is link or Uint8Array: { arial: '/font1.ttf' }. These fonts are selectively loaded if detected as used in the current subtitle track.
* @param {String} [options.fallbackFont='liberation sans'] The font family key of the fallback font in availableFonts to use if the other font for the style is missing special glyphs or unicode.
* @param {Boolean} [options.useLocalFonts=false] If the Local Font Access API is enabled [chrome://flags/#font-access], the library will query for permissions to use local fonts and use them if any are missing. The permission can be queried beforehand using navigator.permissions.request({ name: 'local-fonts' }).
* @param {Number} [options.libassMemoryLimit] libass bitmap cache memory limit in MiB (approximate).
* @param {Number} [options.libassGlyphLimit] libass glyph cache memory limit in MiB (approximate).
*/
constructor(e) {
if (super(), !globalThis.Worker)
throw this.destroy("Worker not supported");
if (!e)
throw this.destroy("No options provided");
this._loaded = /** @type {Promise<void>} */
new Promise((s) => {
this._init = s;
});
const t = h._test();
if (this._onDemandRender = "requestVideoFrameCallback" in HTMLVideoElement.prototype && (e.onDemandRender ?? !0), this._offscreenRender = "transferControlToOffscreen" in HTMLCanvasElement.prototype && !e.canvas && (e.offscreenRender ?? !0), this.timeOffset = e.timeOffset || 0, this._video = e.video, this._videoHeight = 0, this._videoWidth = 0, this._videoColorSpace = null, this._canvas = e.canvas, this._video && !this._canvas)
this._canvasParent = document.createElement("div"), this._canvasParent.className = "JASSUB", this._canvasParent.style.position = "relative", this._canvas = this._createCanvas(), this._video.insertAdjacentElement("afterend", this._canvasParent);
else if (!this._canvas)
throw this.destroy("Don't know where to render: you should give video or canvas in options.");
if (this._bufferCanvas = document.createElement("canvas"), this._bufferCtx = this._bufferCanvas.getContext("2d"), !this._bufferCtx)
throw this.destroy("Canvas rendering not supported");
this._canvasctrl = this._offscreenRender ? this._canvas.transferControlToOffscreen() : this._canvas, this._ctx = !this._offscreenRender && this._canvasctrl.getContext("2d"), this._lastRenderTime = 0, this.debug = !!e.debug, this.prescaleFactor = e.prescaleFactor || 1, this.prescaleHeightLimit = e.prescaleHeightLimit || 1080, this.maxRenderHeight = e.maxRenderHeight || 0, this._boundResize = this.resize.bind(this), this._boundTimeUpdate = this._timeupdate.bind(this), this._boundSetRate = this.setRate.bind(this), this._boundUpdateColorSpace = this._updateColorSpace.bind(this), this._video && this.setVideo(e.video), this._onDemandRender && (this.busy = !1, this._lastDemandTime = null), this._worker = new Worker(e.workerUrl || "static/jass/jassub-worker.js"), this._worker.onmessage = (s) => this._onmessage(s), this._worker.onerror = (s) => this._error(s), t.then(() => {
this._worker.postMessage({
target: "init",
wasmUrl: h._supportsSIMD && e.modernWasmUrl ? e.modernWasmUrl : e.wasmUrl || "./jassub-worker.wasm",
legacyWasmUrl: e.legacyWasmUrl || "./jassub-worker.wasm.js",
asyncRender: typeof createImageBitmap < "u" && (e.asyncRender ?? !0),
onDemandRender: this._onDemandRender,
width: this._canvasctrl.width || 0,
height: this._canvasctrl.height || 0,
blendMode: e.blendMode || "js",
subUrl: e.subUrl,
subContent: e.subContent || null,
fonts: e.fonts || [],
availableFonts: e.availableFonts || { 'droid sans japanese': './DroidSansJapanese.ttf', 'noto sans japanese': 'NotoSansJPRegular.woff', 'liberation sans': './default.woff2' },
fallbackFont: e.fallbackFont || "liberation sans",
debug: this.debug,
targetFps: e.targetFps || 24,
dropAllAnimations: e.dropAllAnimations,
dropAllBlur: e.dropAllBlur,
libassMemoryLimit: e.libassMemoryLimit || 0,
libassGlyphLimit: e.libassGlyphLimit || 0,
// @ts-ignore
useLocalFonts: typeof queryLocalFonts < "u" && (e.useLocalFonts ?? !0),
hasBitmapBug: h._hasBitmapBug
}), this._offscreenRender === !0 && this.sendMessage("offscreenCanvas", null, [this._canvasctrl]);
});
}
_createCanvas() {
return this._canvas = document.createElement("canvas"), this._canvas.style.display = "block", this._canvas.style.position = "absolute", this._canvas.style.pointerEvents = "none", this._canvasParent.appendChild(this._canvas), this._canvas;
}
// test support for WASM, ImageData, alphaBug, but only once, on init so it doesn't run when first running the page
/** @type {boolean|null} */
static _supportsSIMD = null;
/** @type {boolean|null} */
static _hasAlphaBug = null;
/** @type {boolean|null} */
static _hasBitmapBug = null;
static async _test() {
if (h._hasBitmapBug !== null)
return null;
try {
h._supportsSIMD = WebAssembly.validate(Uint8Array.of(0, 97, 115, 109, 1, 0, 0, 0, 1, 5, 1, 96, 0, 1, 123, 3, 2, 1, 0, 10, 10, 1, 8, 0, 65, 0, 253, 15, 253, 98, 11));
} catch {
h._supportsSIMD = !1;
}
const e = document.createElement("canvas"), t = e.getContext("2d", { willReadFrequently: !0 });
if (!t)
throw new Error("Canvas rendering not supported");
if (typeof ImageData.prototype.constructor == "function")
try {
new ImageData(new Uint8ClampedArray([0, 0, 0, 0]), 1, 1);
} catch {
console.log("Detected that ImageData is not constructable despite browser saying so"), self.ImageData = function(o, d, l) {
const m = t.createImageData(d, l);
return o && m.data.set(o), m;
};
}
const s = document.createElement("canvas"), a = s.getContext("2d", { willReadFrequently: !0 });
if (!a)
throw new Error("Canvas rendering not supported");
e.width = s.width = 1, e.height = s.height = 1, t.clearRect(0, 0, 1, 1), a.clearRect(0, 0, 1, 1);
const n = a.getImageData(0, 0, 1, 1).data;
t.putImageData(new ImageData(new Uint8ClampedArray([0, 255, 0, 0]), 1, 1), 0, 0), a.drawImage(e, 0, 0);
const r = a.getImageData(0, 0, 1, 1).data;
if (h._hasAlphaBug = n[1] !== r[1], h._hasAlphaBug && console.log("Detected a browser having issue with transparent pixels, applying workaround"), typeof createImageBitmap < "u") {
const i = new Uint8ClampedArray([255, 0, 255, 0, 255]).subarray(1, 5);
a.drawImage(await createImageBitmap(new ImageData(i, 1)), 0, 0);
const { data: o } = a.getImageData(0, 0, 1, 1);
h._hasBitmapBug = !1;
for (const [d, l] of o.entries())
if (Math.abs(i[d] - l) > 15) {
h._hasBitmapBug = !0, console.log("Detected a browser having issue with partial bitmaps, applying workaround");
break;
}
} else
h._hasBitmapBug = !1;
e.remove(), s.remove();
}
/**
* Resize the canvas to given parameters. Auto-generated if values are omitted.
* @param {Number} [width=0]
* @param {Number} [height=0]
* @param {Number} [top=0]
* @param {Number} [left=0]
* @param {Boolean} [force=false]
*/
resize(e = 0, t = 0, s = 0, a = 0, n = this._video?.paused) {
if ((!e || !t) && this._video) {
const r = this._getVideoPosition();
let i = null;
if (this._videoWidth) {
const o = this._video.videoWidth / this._videoWidth, d = this._video.videoHeight / this._videoHeight;
i = this._computeCanvasSize((r.width || 0) / o, (r.height || 0) / d);
} else
i = this._computeCanvasSize(r.width || 0, r.height || 0);
e = i.width, t = i.height, this._canvasParent && (s = r.y - (this._canvasParent.getBoundingClientRect().top - this._video.getBoundingClientRect().top), a = r.x), this._canvas.style.width = r.width + "px", this._canvas.style.height = r.height + "px";
}
this._canvas.style.top = s + "px", this._canvas.style.left = a + "px", n && this.busy === !1 ? this.busy = !0 : n = !1, this.sendMessage("canvas", { width: e, height: t, force: n });
}
_getVideoPosition(e = this._video.videoWidth, t = this._video.videoHeight) {
const s = e / t, { offsetWidth: a, offsetHeight: n } = this._video, r = a / n;
e = a, t = n, r > s ? e = Math.floor(n * s) : t = Math.floor(a / s);
const i = (a - e) / 2, o = (n - t) / 2;
return { width: e, height: t, x: i, y: o };
}
_computeCanvasSize(e = 0, t = 0) {
const s = this.prescaleFactor <= 0 ? 1 : this.prescaleFactor, a = self.devicePixelRatio || 1;
if (e = e * a, t = t * a, t <= 0 || e <= 0)
e = 0, t = 0;
else {
const n = s < 1 ? -1 : 1;
let r = t * a;
n * r * s <= n * this.prescaleHeightLimit ? r *= s : n * r < n * this.prescaleHeightLimit && (r = this.prescaleHeightLimit), this.maxRenderHeight > 0 && r > this.maxRenderHeight && (r = this.maxRenderHeight), e *= r / t, t = r;
}
return { width: e, height: t };
}
_timeupdate({ type: e }) {
const s = {
seeking: !0,
waiting: !0,
playing: !1
}[e];
s != null && (this._playstate = s), this.setCurrentTime(this._video.paused || this._playstate, this._video.currentTime + this.timeOffset);
}
/**
* Change the video to use as target for event listeners.
* @param {HTMLVideoElement} video
*/
setVideo(e) {
e instanceof HTMLVideoElement ? (this._removeListeners(), this._video = e, this._onDemandRender ? this._video.requestVideoFrameCallback(this._handleRVFC.bind(this)) : (this._playstate = e.paused, e.addEventListener("timeupdate", this._boundTimeUpdate, !1), e.addEventListener("progress", this._boundTimeUpdate, !1), e.addEventListener("waiting", this._boundTimeUpdate, !1), e.addEventListener("seeking", this._boundTimeUpdate, !1), e.addEventListener("playing", this._boundTimeUpdate, !1), e.addEventListener("ratechange", this._boundSetRate, !1), e.addEventListener("resize", this._boundResize, !1)), "VideoFrame" in window && (e.addEventListener("loadedmetadata", this._boundUpdateColorSpace, !1), e.readyState > 2 && this._updateColorSpace()), e.videoWidth > 0 && this.resize(), typeof ResizeObserver < "u" && (this._ro || (this._ro = new ResizeObserver(() => this.resize())), this._ro.observe(e))) : this._error("Video element invalid!");
}
runBenchmark() {
this.sendMessage("runBenchmark");
}
/**
* Overwrites the current subtitle content.
* @param {String} url URL to load subtitles from.
*/
setTrackByUrl(e) {
this.sendMessage("setTrackByUrl", { url: e }), this._reAttachOffscreen(), this._ctx && (this._ctx.filter = "none");
}
/**
* Overwrites the current subtitle content.
* @param {String} content Content of the ASS file.
*/
setTrack(e) {
this.sendMessage("setTrack", { content: e }), this._reAttachOffscreen(), this._ctx && (this._ctx.filter = "none");
}
/**
* Free currently used subtitle track.
*/
freeTrack() {
this.sendMessage("freeTrack");
}
/**
* Sets the playback state of the media.
* @param {Boolean} isPaused Pause/Play subtitle playback.
*/
setIsPaused(e) {
this.sendMessage("video", { isPaused: e });
}
/**
* Sets the playback rate of the media [speed multiplier].
* @param {Number} rate Playback rate.
*/
setRate(e) {
this.sendMessage("video", { rate: e });
}
/**
* Sets the current time, playback state and rate of the subtitles.
* @param {Boolean} [isPaused] Pause/Play subtitle playback.
* @param {Number} [currentTime] Time in seconds.
* @param {Number} [rate] Playback rate.
*/
setCurrentTime(e, t, s) {
this.sendMessage("video", { isPaused: e, currentTime: t, rate: s, colorSpace: this._videoColorSpace });
}
/**
* @typedef {Object} ASS_Event
* @property {Number} Start Start Time of the Event, in 0:00:00:00 format ie. Hrs:Mins:Secs:hundredths. This is the time elapsed during script playback at which the text will appear onscreen. Note that there is a single digit for the hours!
* @property {Number} Duration End Time of the Event, in 0:00:00:00 format ie. Hrs:Mins:Secs:hundredths. This is the time elapsed during script playback at which the text will disappear offscreen. Note that there is a single digit for the hours!
* @property {String} Style Style name. If it is "Default", then your own *Default style will be substituted.
* @property {String} Name Character name. This is the name of the character who speaks the dialogue. It is for information only, to make the script easier to follow when editing/timing.
* @property {Number} MarginL 4-figure Left Margin override. The values are in pixels. All zeroes means the default margins defined by the style are used.
* @property {Number} MarginR 4-figure Right Margin override. The values are in pixels. All zeroes means the default margins defined by the style are used.
* @property {Number} MarginV 4-figure Bottom Margin override. The values are in pixels. All zeroes means the default margins defined by the style are used.
* @property {String} Effect Transition Effect. This is either empty, or contains information for one of the three transition effects implemented in SSA v4.x
* @property {String} Text Subtitle Text. This is the actual text which will be displayed as a subtitle onscreen. Everything after the 9th comma is treated as the subtitle text, so it can include commas.
* @property {Number} ReadOrder Number in order of which to read this event.
* @property {Number} Layer Z-index overlap in which to render this event.
* @property {Number} _index (Internal) index of the event.
*/
/**
* Create a new ASS event directly.
* @param {ASS_Event} event
*/
createEvent(e) {
this.sendMessage("createEvent", { event: e });
}
/**
* Overwrite the data of the event with the specified index.
* @param {ASS_Event} event
* @param {Number} index
*/
setEvent(e, t) {
this.sendMessage("setEvent", { event: e, index: t });
}
/**
* Remove the event with the specified index.
* @param {Number} index
*/
removeEvent(e) {
this.sendMessage("removeEvent", { index: e });
}
/**
* Get all ASS events.
* @param {function(Error|null, ASS_Event): void} callback Function to callback when worker returns the events.
*/
getEvents(e) {
this._fetchFromWorker({
target: "getEvents"
}, (t, { events: s }) => {
e(t, s);
});
}
/**
* @typedef {Object} ASS_Style
* @property {String} Name The name of the Style. Case sensitive. Cannot include commas.
* @property {String} FontName The fontname as used by Windows. Case-sensitive.
* @property {Number} FontSize Font size.
* @property {Number} PrimaryColour A long integer BGR (blue-green-red) value. i.e. the byte order in the hexadecimal equivalent of this number is BBGGRR
* @property {Number} SecondaryColour A long integer BGR (blue-green-red) value. i.e. the byte order in the hexadecimal equivalent of this number is BBGGRR
* @property {Number} OutlineColour A long integer BGR (blue-green-red) value. i.e. the byte order in the hexadecimal equivalent of this number is BBGGRR
* @property {Number} BackColour This is the colour of the subtitle outline or shadow, if these are used. A long integer BGR (blue-green-red) value. i.e. the byte order in the hexadecimal equivalent of this number is BBGGRR.
* @property {Number} Bold This defines whether text is bold (true) or not (false). -1 is True, 0 is False. This is independent of the Italic attribute - you can have text which is both bold and italic.
* @property {Number} Italic Italic. This defines whether text is italic (true) or not (false). -1 is True, 0 is False. This is independent of the bold attribute - you can have text which is both bold and italic.
* @property {Number} Underline -1 or 0
* @property {Number} StrikeOut -1 or 0
* @property {Number} ScaleX Modifies the width of the font. [percent]
* @property {Number} ScaleY Modifies the height of the font. [percent]
* @property {Number} Spacing Extra space between characters. [pixels]
* @property {Number} Angle The origin of the rotation is defined by the alignment. Can be a floating point number. [degrees]
* @property {Number} BorderStyle 1=Outline + drop shadow, 3=Opaque box
* @property {Number} Outline If BorderStyle is 1, then this specifies the width of the outline around the text, in pixels. Values may be 0, 1, 2, 3 or 4.
* @property {Number} Shadow If BorderStyle is 1, then this specifies the depth of the drop shadow behind the text, in pixels. Values may be 0, 1, 2, 3 or 4. Drop shadow is always used in addition to an outline - SSA will force an outline of 1 pixel if no outline width is given.
* @property {Number} Alignment This sets how text is "justified" within the Left/Right onscreen margins, and also the vertical placing. Values may be 1=Left, 2=Centered, 3=Right. Add 4 to the value for a "Toptitle". Add 8 to the value for a "Midtitle". eg. 5 = left-justified toptitle
* @property {Number} MarginL This defines the Left Margin in pixels. It is the distance from the left-hand edge of the screen.The three onscreen margins (MarginL, MarginR, MarginV) define areas in which the subtitle text will be displayed.
* @property {Number} MarginR This defines the Right Margin in pixels. It is the distance from the right-hand edge of the screen. The three onscreen margins (MarginL, MarginR, MarginV) define areas in which the subtitle text will be displayed.
* @property {Number} MarginV This defines the vertical Left Margin in pixels. For a subtitle, it is the distance from the bottom of the screen. For a toptitle, it is the distance from the top of the screen. For a midtitle, the value is ignored - the text will be vertically centred.
* @property {Number} Encoding This specifies the font character set or encoding and on multi-lingual Windows installations it provides access to characters used in more than one language. It is usually 0 (zero) for English (Western, ANSI) Windows.
* @property {Number} treat_fontname_as_pattern
* @property {Number} Blur
* @property {Number} Justify
*/
/**
* Create a new ASS style directly.
* @param {ASS_Style} style
*/
createStyle(e) {
this.sendMessage("createStyle", { style: e });
}
/**
* Overwrite the data of the style with the specified index.
* @param {ASS_Style} style
* @param {Number} index
*/
setStyle(e, t) {
this.sendMessage("setStyle", { style: e, index: t });
}
/**
* Remove the style with the specified index.
* @param {Number} index
*/
removeStyle(e) {
this.sendMessage("removeStyle", { index: e });
}
/**
* Get all ASS styles.
* @param {function(Error|null, ASS_Style): void} callback Function to callback when worker returns the styles.
*/
getStyles(e) {
this._fetchFromWorker({
target: "getStyles"
}, (t, { styles: s }) => {
e(t, s);
});
}
/**
* Adds a font to the renderer.
* @param {String|Uint8Array} font Font to add.
*/
addFont(e) {
this.sendMessage("addFont", { font: e });
}
_sendLocalFont(e) {
try {
queryLocalFonts().then((t) => {
const s = t?.find((a) => a.fullName.toLowerCase() === e);
s && s.blob().then((a) => {
a.arrayBuffer().then((n) => {
this.addFont(new Uint8Array(n));
});
});
});
} catch (t) {
console.warn("Local fonts API:", t);
}
}
_getLocalFont({ font: e }) {
try {
navigator?.permissions?.query ? navigator.permissions.query({ name: "local-fonts" }).then((t) => {
t.state === "granted" && this._sendLocalFont(e);
}) : this._sendLocalFont(e);
} catch (t) {
console.warn("Local fonts API:", t);
}
}
_unbusy() {
this._lastDemandTime ? this._demandRender(this._lastDemandTime) : this.busy = !1;
}
_handleRVFC(e, { mediaTime: t, width: s, height: a }) {
if (this._destroyed)
return null;
this.busy ? this._lastDemandTime = { mediaTime: t, width: s, height: a } : (this.busy = !0, this._demandRender({ mediaTime: t, width: s, height: a })), this._video.requestVideoFrameCallback(this._handleRVFC.bind(this));
}
_demandRender({ mediaTime: e, width: t, height: s }) {
this._lastDemandTime = null, (t !== this._videoWidth || s !== this._videoHeight) && (this._videoWidth = t, this._videoHeight = s, this.resize()), this.sendMessage("demand", { time: e + this.timeOffset });
}
// if we're using offscreen render, we can't use ctx filters, so we can't use a transfered canvas
_detachOffscreen() {
if (!this._offscreenRender || this._ctx)
return null;
this._canvas.remove(), this._createCanvas(), this._canvasctrl = this._canvas, this._ctx = this._canvasctrl.getContext("2d"), this.sendMessage("detachOffscreen"), this.busy = !1, this.resize(0, 0, 0, 0, !0);
}
// if the video or track changed, we need to re-attach the offscreen canvas
_reAttachOffscreen() {
if (!this._offscreenRender || !this._ctx)
return null;
this._canvas.remove(), this._createCanvas(), this._canvasctrl = this._canvas.transferControlToOffscreen(), this._ctx = !1, this.sendMessage("offscreenCanvas", null, [this._canvasctrl]), this.resize(0, 0, 0, 0, !0);
}
_updateColorSpace() {
this._video.requestVideoFrameCallback(() => {
try {
const e = new VideoFrame(this._video);
this._videoColorSpace = _[e.colorSpace.matrix], e.close(), this.sendMessage("getColorSpace");
} catch (e) {
console.warn(e);
}
});
}
/**
* Verify the color spaces for subtitles and videos, then apply filters to correct the color of subtitles.
* @param {Object} options
* @param {String} options.subtitleColorSpace Subtitle color space. One of: BT601 BT709 SMPTE240M FCC
* @param {String=} options.videoColorSpace Video color space. One of: BT601 BT709
*/
_verifyColorSpace({ subtitleColorSpace: e, videoColorSpace: t = this._videoColorSpace }) {
!e || !t || e !== t && (this._detachOffscreen(), this._ctx.filter = `url("data:image/svg+xml;utf8,<svg xmlns='http://www.w3.org/2000/svg'><filter id='f'><feColorMatrix type='matrix' values='${f[e][t]} 0 0 0 0 0 1 0'/></filter></svg>#f")`);
}
_render({ images: e, asyncRender: t, times: s, width: a, height: n, colorSpace: r }) {
this._unbusy(), this.debug && (s.IPCTime = Date.now() - s.JSRenderTime), (this._canvasctrl.width !== a || this._canvasctrl.height !== n) && (this._canvasctrl.width = a, this._canvasctrl.height = n, this._verifyColorSpace({ subtitleColorSpace: r })), this._ctx.clearRect(0, 0, this._canvasctrl.width, this._canvasctrl.height);
for (const i of e)
i.image && (t ? (this._ctx.drawImage(i.image, i.x, i.y), i.image.close()) : (this._bufferCanvas.width = i.w, this._bufferCanvas.height = i.h, this._bufferCtx.putImageData(new ImageData(this._fixAlpha(new Uint8ClampedArray(i.image)), i.w, i.h), 0, 0), this._ctx.drawImage(this._bufferCanvas, i.x, i.y)));
if (this.debug) {
s.JSRenderTime = Date.now() - s.JSRenderTime - s.IPCTime;
let i = 0;
const o = s.bitmaps || e.length;
delete s.bitmaps;
for (const d in s)
i += s[d];
console.log("Bitmaps: " + o + " Total: " + (i | 0) + "ms", s);
}
}
_fixAlpha(e) {
if (h._hasAlphaBug)
for (let t = 3; t < e.length; t += 4)
e[t] = e[t] > 1 ? e[t] : 1;
return e;
}
_ready() {
this._init(), this.dispatchEvent(new CustomEvent("ready"));
}
/**
* Send data and execute function in the worker.
* @param {String} target Target function.
* @param {Object} [data] Data for function.
* @param {Transferable[]} [transferable] Array of transferables.
*/
async sendMessage(e, t = {}, s) {
await this._loaded, s ? this._worker.postMessage({
target: e,
transferable: s,
...t
}, [...s]) : this._worker.postMessage({
target: e,
...t
});
}
_fetchFromWorker(e, t) {
try {
const s = e.target, a = setTimeout(() => {
r(new Error("Error: Timeout while try to fetch " + s));
}, 5e3), n = ({ data: i }) => {
i.target === s && (t(null, i), this._worker.removeEventListener("message", n), this._worker.removeEventListener("error", r), clearTimeout(a));
}, r = (i) => {
t(i), this._worker.removeEventListener("message", n), this._worker.removeEventListener("error", r), clearTimeout(a);
};
this._worker.addEventListener("message", n), this._worker.addEventListener("error", r), this._worker.postMessage(e);
} catch (s) {
this._error(s);
}
}
_console({ content: e, command: t }) {
console[t].apply(console, JSON.parse(e));
}
_onmessage({ data: e }) {
this["_" + e.target] && this["_" + e.target](e);
}
_error(e) {
const t = e instanceof Error ? e : e instanceof ErrorEvent ? e.error : new Error(e), s = e instanceof Event ? new ErrorEvent(e.type, e) : new ErrorEvent("error", { error: t });
return this.dispatchEvent(s), console.error(t), t;
}
_removeListeners() {
this._video && (this._ro && this._ro.unobserve(this._video), this._ctx && (this._ctx.filter = "none"), this._video.removeEventListener("timeupdate", this._boundTimeUpdate), this._video.removeEventListener("progress", this._boundTimeUpdate), this._video.removeEventListener("waiting", this._boundTimeUpdate), this._video.removeEventListener("seeking", this._boundTimeUpdate), this._video.removeEventListener("playing", this._boundTimeUpdate), this._video.removeEventListener("ratechange", this._boundSetRate), this._video.removeEventListener("resize", this._boundResize), this._video.removeEventListener("loadedmetadata", this._boundUpdateColorSpace));
}
/**
* Destroy the object, worker, listeners and all data.
* @param {String|Error} [err] Error to throw when destroying.
*/
destroy(e) {
return e && (e = this._error(e)), this._video && this._canvasParent && this._video.parentNode?.removeChild(this._canvasParent), this._destroyed = !0, this._removeListeners(), this.sendMessage("destroy"), this._worker?.terminate(), e;
}
}
export {
h as default
};

(One more file diff with overlong lines is omitted from this view.)

Lyrics input handler, likely the lyrics/input.js referenced by the page template (new file, 72 lines; file path not shown in this view)

@@ -0,0 +1,72 @@
const update_lyrics_keys = new Set(['Enter', 'ArrowDown', 'ArrowUp']);
const string_replacements_element = document.querySelector('#word_replacements_pre');
const word_overrides_element = document.querySelector('#word_replacements_post');
const lyrics_input_element = document.querySelector('#lyrics_input_textarea');
const lyrics_output_element = document.querySelector('#lyrics_output');
const req_tokenization_url = './tokenize'
// let has_lyrics_changed = false
let tokenized_lyric_lines = []
function update_lyrics_output(data) {
tokenized_lyric_lines = data.parsed_lines;
var html = ''
tokenized_lyric_lines.forEach(line => {
if ((typeof line) == "string") {
console.log(`${line} is a string`)
html += line;
} else {
console.log(`${line} is not a string`)
// Tokenized object
html += line.romaji_syllables.join('');
html += '<br>';
html += '<span style="font-size: 32px;font-family: Droid Sans Japanese">';
line.furi_blocks.forEach(block => {
if (block[1]) {
html += `<ruby>${block[0]}<rt>${block[1]}</rt></ruby>`;
} else {
html += block[0];
}
});
html += '</span>';
}
html += '<br>';
});
lyrics_output_element.innerHTML = html;
console.log(`updating lyrics: ${tokenized_lyric_lines}`);
}
function lyrics_input_updated() {
const content = lyrics_input_element.value;
const pre_replacements = string_replacements_element.value;
const post_replacements = word_overrides_element.value;
console.log(`preparing to update lyrics: ${content}`);
let content_replaced = content
pre_replacements.split('\n').forEach(element => {
console.log(element);
const [search, replace] = element.replace('|', '\t').split('\t');
if (search && replace !== undefined) content_replaced = content_replaced.replaceAll(search, replace) // skip malformed lines with no separator
});
console.log(`preparing to request tokenization of lyrics: ${content_replaced}`);
const input = encodeURIComponent(content_replaced)
const wo = encodeURIComponent(post_replacements)
const req = new Request(`${req_tokenization_url}?input=${input}&word_overrides=${wo}`, {method: "GET"})
// const lines = content_replaced.split('\n');
fetch(req)
.then((rsp) => {
if (rsp.status === 200) {
return rsp.json();
} else {
throw new Error("API error!");
}
})
.then((rsp) => {
console.debug(rsp);
update_lyrics_output(rsp);
})
.catch((error) => {
console.error(error);
})
}
// lyrics_input_element.addEventListener('keyup', (event) => {if (update_lyrics_keys.has(event.key)) lyrics_input_updated();});
lyrics_input_element.addEventListener('change', lyrics_input_updated)

Stylesheet, likely the lyrics/style.css referenced by the page template (new file, 58 lines; file path not shown in this view)

@@ -0,0 +1,58 @@
video, input {
display: block;
}
input {
width: 100%;
}
.info {
background-color: aqua;
}
.error {
background-color: red;
color: white;
}
#song_information {
min-width: 400px;
}
#word_overrides {
min-width: 400px;
max-width: 800px;
}
#lyrics_output_outer {
min-width: 600px;
}
#lyrics_output {
background-color: beige;
/* border-color: grey; */
border-color: #47c7ce;
border-width: 1px;
border-radius: 5px;
padding: 5px;
border-style: solid;
}
#arrangement_input {
max-width: 260px;
}
.flex-container {
display: flex;
flex-flow: wrap;
background-color: aliceblue;
}
.flex-container > div {
background-color: aqua;
margin: 10px;
border-radius: 10px;
padding: 20px;
padding-top: 10px;
}
.flex-container > div > h2 {
margin-top: 0;
margin-bottom: 10px;
}

Video player setup, likely the lyrics/video.js referenced by the page template (new file, 40 lines; file path not shown in this view)

@@ -0,0 +1,40 @@
import JASSUB from '../jass/jassub.es.js'
(function localFileVideoPlayer() {
'use strict'
var URL = window.URL || window.webkitURL
var displayMessage = function (message, isError) {
var element = document.querySelector('#message')
element.innerHTML = message
element.className = isError ? 'error' : 'info'
}
var playSelectedFile = function (event) {
var file = this.files[0]
var type = file.type
var videoNode = document.querySelector('video')
var canPlay = videoNode.canPlayType(type)
if (canPlay === '') canPlay = 'no'
var message = 'Can play type "' + type + '": ' + canPlay
var isError = canPlay === 'no'
displayMessage(message, isError)
if (isError) {
return
}
var fileURL = URL.createObjectURL(file)
videoNode.src = fileURL
}
var inputNode = document.querySelector('input')
inputNode.addEventListener('change', playSelectedFile, false)
})()
// import font from '../jass/DroidSansJapanese.ttf'
const renderer = new JASSUB({
video: document.querySelector('video'),
subUrl: '../test.ass',
fonts: [],
// fallbackFont: 'liberation sans',
// fallbackFont: 'Droid Sans Japanese',
// availableFonts: {'DroidSansJapanese': './DroidSansJapanese.ttf'}
})

Lyric Assistant page template, likely the lyrics/index.html rendered by the index view (new file, 85 lines; file path not shown in this view)

@@ -0,0 +1,85 @@
{% load static %}
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>Lyric Assistant</title>
<link rel="stylesheet" href="{% static 'lyrics/style.css' %}">
<script type="module" src="{% static 'lyrics/input.js' %}" defer></script>
<script type="module" src="{% static 'lyrics/video.js' %}" defer></script>
</head>
<body>
<h1>カラオケ手伝いLyric Assistant</h1>
<div class="flex-container">
<!-- Title and credits -->
<div id="song_information">
<h2>Song Information</h2>
<!-- TODO: split these into japanese and english fields -->
<table>
<tr>
<td><label for="song_title">曲名Song title:</label></td>
<td><input type="text" id="song_title" placeholder="曲名Song title"></td>
</tr>
<tr>
<td><label for="song_lyricist">作詞Song lyricist:</label></td>
<td><input type="text" id="song_lyricist" placeholder="作詞Song lyricist"></td>
</tr>
<tr>
<td><label for="song_composer">作曲Song composer:</label></td>
<td><input type="text" id="song_composer" placeholder="作曲Song composer"></td>
</tr>
</table>
<!-- TODO: decide if this gets its own div -->
<br><h2>Save/Load</h2>
<button>Save Song</button>
<button>Load Song</button>
</div>
<!-- Word Overrides -->
<div id="word_overrides">
<h2>Custom Word Overrides</h2>
<details>The automatic furigana generation uses MeCab/Unidic to parse and tokenize the original Japanese text.
Some of its default word priorities are poor in general, e.g. <ruby>私<rt>わたくし</rt></ruby> and <ruby>主<rt>ぬし</rt></ruby>, which this program overrides to <ruby>私<rt>わたし</rt></ruby> and <ruby>主<rt>しゅ</rt></ruby> by default, but you may also want to define your own kana readings for other words that are used a lot in this song.<br>
For individual words, you can instead specify a reading within the lyrics by writing <code>{漢字|かんじ}</code>, which prevents the automatic parser from touching it and produces <ruby>漢字<rt>かんじ</rt></ruby> in the output.</details>
<table>
<tr>
<td><label for="word_replacements_pre"><details><summary>String Replacements</summary>With <i><b>String Replacements</b></i> you can specify a list of search+replace operations to run on the input <i>before</i> the tokenizer is run on it. This means you can force a word to always expand into the above manual furigana notation, and you can even use special characters to distinguish the input pattern from other instances of the kanji!</details></label></td>
<td><label for="word_replacements_post"><details><summary>Word Overrides</summary>With <i><b>Word Overrides</b></i> you can specify a list of word tokens to replace the readings of <i>after</i> the tokenizer has decided that exact sequence is a word. The format is <code>word[|old_reading]|my_reading</code>, e.g. <code>私|わたし</code> to replace any instance of 私, or <code>主|ぬし|しゅ</code> to only replace instances of 主 that have been parsed as ぬし (e.g. it would leave <ruby>主<rt>あるじ</rt></ruby> alone.)</details></label></td>
</tr>
<tr>
<td><textarea id="word_replacements_pre" name="word_replacements_pre" rows="4" cols="30" placeholder="hello|おはよう"></textarea></td>
<td><textarea id="word_replacements_post" name="word_replacements_post" rows="4" cols="30" placeholder="私|わたくし|わたし&#10;主|しゅ"></textarea></td>
</tr>
</table>
</div>
<!-- Lyrics input -->
<div id="lyrics_input">
<h2>Lyrics Input</h2>
<textarea id="lyrics_input_textarea" rows="40" cols="80" placeholder="[v1]&#10;おはよう世界"></textarea>
</div>
<!-- Lyrics output -->
<div id="lyrics_output_outer">
<h2>Lyrics Output</h2>
<div id="lyrics_output">
</div>
</div>
<!-- Arrangement input -->
<div id="arrangement_input">
<h2>Arrangement Input</h2>
Comma-separated section tags; numbers are beats to insert between sections.<br>
<textarea id="arrangement" rows="4" cols="30" wrap="soft" placeholder="v1, c, 8, v2, c, c, 16, b, c"></textarea>
</div>
<!-- Embedded video player for subtitle timing and preview -->
<div id="embedded_video">
<h2>Video</h2>
You may select a local video file for subtitle timing and preview. It will not be uploaded anywhere.
<div id="message"></div>
<input type="file" accept="video/*"/>
<video controls autoplay width="640"></video>
</div>
</div>
</body>
</html>
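
A note on the override syntax described in the template above: the real parsing happens in japanese_converters.parse_japanese_line, which is not part of this commit, so the sketch below is an illustration only. It shows one plausible way to turn the word[|old_reading]|my_reading format into a lookup table, under the assumption that a missing old_reading means "replace every reading of this word".

# Illustrative sketch only; the real logic lives in japanese_converters,
# which is not included in this diff.
#   word|my_reading              -> replace every reading of word
#   word|old_reading|my_reading  -> replace only tokens currently read as old_reading
def parse_word_overrides(overrides_str: str) -> dict[tuple[str, str | None], str]:
    overrides: dict[tuple[str, str | None], str] = {}
    for raw_line in overrides_str.splitlines():
        parts = [p.strip() for p in raw_line.split('|')]
        if len(parts) == 2 and all(parts):
            word, new_reading = parts
            overrides[(word, None)] = new_reading
        elif len(parts) == 3 and all(parts):
            word, old_reading, new_reading = parts
            overrides[(word, old_reading)] = new_reading
    return overrides

# Example: parse_word_overrides("私|わたくし|わたし\n主|しゅ")
# returns {("私", "わたくし"): "わたし", ("主", None): "しゅ"}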

lyrics/tests.py (new file, 3 lines)

@@ -0,0 +1,3 @@
from django.test import TestCase
# Create your tests here.
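
lyrics/tests.py is left as the generated stub. A minimal sketch of what a first test could look like, assuming the lyrics URLconf stays mounted at /lyrics/ as in ktsite/urls.py and that the tokenize view further down keeps returning JsonResponse(request.GET) when no input is given (the sketch is not part of the commit):

# Sketch only, not part of this commit. Assumes the lyrics URLconf is
# mounted at /lyrics/ as in ktsite/urls.py.
from django.test import TestCase

class TokenizeViewTests(TestCase):
    def test_tokenize_without_input_echoes_query(self):
        # With no 'input' parameter the view falls through to
        # JsonResponse(request.GET), so an empty query string should
        # come back as an empty JSON object.
        response = self.client.get('/lyrics/tokenize')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json(), {})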

lyrics/urls.py (new file, 7 lines)

@@ -0,0 +1,7 @@
from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('tokenize', views.tokenize, name='tokenize'),
]

lyrics/views.py (new file, 21 lines)

@@ -0,0 +1,21 @@
from django.shortcuts import render
from django.http import HttpRequest, JsonResponse
from japanese_converters import parse_japanese_line
# Create your views here.
def index(request: HttpRequest):
# return HttpResponse("Hello world. You're at the Lyrics app index.")
return render(request, 'lyrics/index.html', {})
def tokenize(request: HttpRequest):
if 'input' in request.GET:
overrides = request.GET.get('word_overrides', '')
lines = request.GET.get('input').splitlines()
parsed = []
for line in lines:
if (line.strip() and not line.startswith('[')):
parsed.append(parse_japanese_line(line, overrides_str=overrides))
else:
parsed.append(line)
return JsonResponse({'parsed_lines': parsed})
return JsonResponse(request.GET)
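
For reference, the response shape that input.js consumes appears to be a parsed_lines list mixing untouched strings (blank lines and [section] tags) with per-line objects carrying romaji_syllables and furi_blocks. The field names are taken from input.js; the example values below are invented, since parse_japanese_line itself lives in japanese_converters and is not part of this diff.

# Illustration only: field names inferred from input.js, values made up;
# japanese_converters.parse_japanese_line is not included in this commit.
example_tokenize_response = {
    'parsed_lines': [
        '[v1]',  # section tags and blank lines pass through unchanged
        {
            # joined by input.js into a romaji line above the Japanese text
            'romaji_syllables': ['o', 'ha', 'yo', 'u', ' ', 'se', 'ka', 'i'],
            # [text, furigana] pairs; a falsy second element means no furigana
            'furi_blocks': [['おはよう', None], ['世界', 'せかい']],
        },
    ]
}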

manage.py (new executable file, 22 lines)

@@ -0,0 +1,22 @@
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ktsite.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()