Deno worker support

Ben Allfree 2023-01-03 14:09:03 -08:00
parent b892d86947
commit 65b963e09d
56 changed files with 2854 additions and 178 deletions

View File

@ -1,8 +1,6 @@
PUBLIC_APP_PROTOCOL=https
PUBLIC_APP_DOMAIN=pockethost.test
PUBLIC_PB_PROTOCOL=https
PUBLIC_PB_DOMAIN=pockethost.test
PUBLIC_PB_SUBDOMAIN=pockethost-central
PUBLIC_APP_DB=pockethost-central
DAEMON_PB_BIN_DIR=`pwd`/packages/pocketbase/dist
DAEMON_PB_DATA_DIR=`pwd`/.data
DAEMON_PB_USERNAME=#ADDME
@ -16,4 +14,5 @@ SSL_CERT=`pwd`/ssl/pockethost.test.crt
PH_BIN_CACHE=`pwd`/.pbincache
PH_FTP_PASV_IP=0.0.0.0
PH_FTP_PASV_PORT_MIN=10000
PH_FTP_PASV_PORT_MAX=20000
PH_FTP_PASV_PORT_MAX=20000
DENO_PATH=deno

View File

@ -3,6 +3,7 @@ import { mkSingleton } from './mkSingleton'
export type Config = {
raw?: boolean
debug: boolean
trace?: boolean
errorTrace?: boolean
pfx?: string[]
}
@ -17,7 +18,8 @@ export const createLogger = (config: Partial<Config>) => {
const _config: Required<Config> = {
raw: false,
debug: true,
errorTrace: true,
trace: false,
errorTrace: false,
pfx: [''],
...config,
}
@ -57,7 +59,7 @@ export const createLogger = (config: Partial<Config>) => {
}
const trace = (...args: any[]) => {
_log(true, _pfx(`TRACE`), ...args)
_log(_config.trace, _pfx(`TRACE`), ...args)
}
const error = (...args: any[]) => {
@ -81,7 +83,20 @@ export const createLogger = (config: Partial<Config>) => {
const child = (extra: any) => create(JSON.stringify(extra))
return { raw, dbg, warn, info, error, create, child, trace, debug: dbg }
return {
raw,
dbg,
warn,
info,
error,
create,
child,
trace,
debug: dbg,
shutdown() {
dbg(`Logger shutting down`)
},
}
}
export const logger = mkSingleton((config: Config) => createLogger(config))
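
The logger gains an opt-in `trace` switch (TRACE output is now gated, and `errorTrace` defaults to false) plus a `shutdown()` hook so it satisfies the new singleton contract. A minimal usage sketch, assuming `@pockethost/common` re-exports `createLogger` and `logger` via the index shown below:

import { createLogger, logger } from '@pockethost/common'

// Initialize the global singleton once at startup, as the daemon does below.
logger({ debug: true })

// Per-module loggers: TRACE lines only appear when `trace: true` is passed.
const { dbg, trace, child, shutdown } = createLogger({ debug: true, trace: true })
dbg(`always printed while debug is on`)
trace(`printed only because trace: true`)
child({ scope: 'example' }).info(`child logger with a JSON-stringified prefix`)
shutdown() // new in this commit; currently just logs that the logger is stopping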

View File

@ -0,0 +1,53 @@
import { values } from '@s-libs/micro-dash'
export type Unsubscribe = () => void
export type EventSubscriber<TPayload> = (
cb: EventHandler<TPayload>
) => Unsubscribe
export type EventEmitter<TPayload> = (
payload: TPayload,
stopOnHandled?: boolean
) => Promise<boolean>
export type EventHandler<TPayload> = (
payload: TPayload,
isHandled: boolean
) => boolean | void | Promise<boolean | void>
/**
*
* @param defaultHandler Optional handler to call if no handler calls `handled()`
* @returns void
*/
export const createEvent = <TPayload>(
defaultHandler?: EventHandler<TPayload>
): [EventSubscriber<TPayload>, EventEmitter<TPayload>] => {
let i = 0
const callbacks: any = {}
let callbacksArray: EventHandler<TPayload>[] = []
const onEvent = (cb: EventHandler<TPayload>) => {
const id = i++
callbacks[id] = cb
callbacksArray = values(callbacks)
return () => {
delete callbacks[id]
}
}
const fireEvent = async (payload: TPayload, stopOnHandled = false) => {
let _handled = false
for (let i = 0; i < callbacksArray.length; i++) {
const cb = callbacksArray[i]
if (!cb) continue
const res: boolean = !!(await cb(payload, _handled))
_handled = _handled || res
if (stopOnHandled && _handled) break
}
if (!_handled) {
await defaultHandler?.(payload, false)
}
return _handled
}
return [onEvent, fireEvent]
}
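
`createEvent` is a small typed pub/sub helper: subscribers can mark a payload as handled, emitters can stop at the first handler that does, and an optional default handler runs when nobody claims the event. A usage sketch (the payload type is illustrative):

import { createEvent } from '@pockethost/common'

type Notification = { message: string }

// Subscriber/emitter pair; the default handler fires only if no subscriber
// returned `true` (i.e. nothing marked the event as handled).
const [onNotify, fireNotify] = createEvent<Notification>((payload) => {
  console.log(`unhandled:`, payload.message)
})

const unsubscribe = onNotify((payload, isHandled) => {
  if (isHandled) return // an earlier subscriber already claimed it
  console.log(`handled:`, payload.message)
  return true // mark as handled
})

const main = async () => {
  await fireNotify({ message: `hello` })   // -> handled: hello
  unsubscribe()
  await fireNotify({ message: `goodbye` }) // -> unhandled: goodbye
}
main()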

View File

@ -1,7 +1,9 @@
export * from './assert'
export * from './CleanupManager'
export * from './events'
export * from './Logger'
export * from './mkSingleton'
export * from './newId'
export * from './pocketbase-client-helpers'
export * from './safeCatch'
export * from './schema'

View File

@ -1,4 +1,10 @@
export const mkSingleton = <TConfig, TApi>(
export type SingletonApi = {
shutdown: () => void | Promise<void>
}
export const mkSingleton = <
TConfig,
TApi extends SingletonApi | Promise<SingletonApi>
>(
factory: (config: TConfig) => TApi
) => {
let _service: TApi | undefined = undefined
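
`mkSingleton` factories must now return (or resolve to) an object exposing `shutdown()`. A sketch of a conforming service; the service itself is hypothetical:

import { logger, mkSingleton } from '@pockethost/common'

// Hypothetical heartbeat service; the only new requirement from this commit is
// that the returned API exposes `shutdown()`.
export const heartbeatService = mkSingleton(async (config: { intervalMs: number }) => {
  const { dbg } = logger().create(`heartbeatService`)
  const timer = setInterval(() => dbg(`tick`), config.intervalMs)
  return {
    shutdown() {
      clearInterval(timer)
      dbg(`heartbeatService stopped`)
    },
  }
})

// First call creates the singleton; later calls (as in the daemon's exit
// handler below) reuse it: `(await heartbeatService()).shutdown()`.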

View File

@ -0,0 +1,4 @@
import { customAlphabet } from 'nanoid'
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz')
export const newId = () => nanoid(15)

View File

@ -1,4 +1,4 @@
import { customAlphabet } from 'nanoid'
import Ajv, { JSONSchemaType } from 'ajv'
import type pocketbaseEs from 'pocketbase'
import {
ClientResponseError,
@ -7,13 +7,17 @@ import {
} from 'pocketbase'
import type { JsonObject } from 'type-fest'
import { logger } from '../Logger'
import { newId } from '../newId'
import { safeCatch } from '../safeCatch'
import { BaseFields, RpcCommands, UserId } from '../schema'
import {
RpcCommands,
RpcFields,
RpcRecord_Create,
RpcStatus,
RPC_COLLECTION,
} from '../schema'
import type { WatchHelper } from './WatchHelper'
export const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz')
export const newId = () => nanoid(15)
export type RpcHelperConfig = {
client: pocketbaseEs
watchHelper: WatchHelper
@ -21,36 +25,6 @@ export type RpcHelperConfig = {
export type RpcHelper = ReturnType<typeof createRpcHelper>
export enum RpcStatus {
New = 'new',
Queued = 'queued',
Running = 'running',
Starting = 'starting',
FinishedSuccess = 'finished-success',
FinishedError = 'finished-error',
}
export type RpcPayloadBase = JsonObject
export type RpcRecord_Create<TRecord extends RpcFields<any, any>> = Pick<
TRecord,
'id' | 'userId' | 'payload' | 'cmd'
>
export type RpcFields<
TPayload extends RpcPayloadBase,
TRes extends JsonObject
> = BaseFields & {
userId: UserId
cmd: string
payload: TPayload
status: RpcStatus
message: string
result: TRes
}
export const RPC_COLLECTION = `rpc`
export const createRpcHelper = (config: RpcHelperConfig) => {
const {
client,
@ -58,22 +32,26 @@ export const createRpcHelper = (config: RpcHelperConfig) => {
} = config
const mkRpc = <TPayload extends JsonObject, TResult extends JsonObject>(
cmd: RpcCommands
cmd: RpcCommands,
schema: JSONSchemaType<TPayload>
) => {
type ConcreteRpcRecord = RpcFields<TPayload, TResult>
const { dbg } = logger().create('mkRpc')
const validator = new Ajv().compile(schema)
return safeCatch(
cmd,
async (
payload: TPayload,
cb?: (data: RecordSubscription<ConcreteRpcRecord>) => void
) => {
const { dbg } = logger().create(cmd)
const _user = client.authStore.model
if (!_user) {
throw new Error(`Expected authenticated user here.`)
}
if (!validator(payload)) {
throw new Error(`Invalid RPC payload: ${validator.errors}`)
}
const { id: userId } = _user
const rpcIn: RpcRecord_Create<ConcreteRpcRecord> = {
id: newId(),
@ -85,26 +63,36 @@ export const createRpcHelper = (config: RpcHelperConfig) => {
let unsub: UnsubscribeFunc | undefined
return new Promise<ConcreteRpcRecord['result']>(
async (resolve, reject) => {
unsub = await watchById<ConcreteRpcRecord>(
RPC_COLLECTION,
rpcIn.id,
(data) => {
dbg(`Got an RPC change`, data)
cb?.(data)
if (data.record.status === RpcStatus.FinishedSuccess) {
resolve(data.record.result)
return
}
if (data.record.status === RpcStatus.FinishedError) {
reject(new ClientResponseError(data.record.result))
return
}
},
false
)
await client.collection(RPC_COLLECTION).create(rpcIn)
try {
dbg(`Watching ${rpcIn.id}`)
unsub = await watchById<ConcreteRpcRecord>(
RPC_COLLECTION,
rpcIn.id,
(data) => {
dbg(`Got an RPC change`, data)
cb?.(data)
if (data.record.status === RpcStatus.FinishedSuccess) {
resolve(data.record.result)
return
}
if (data.record.status === RpcStatus.FinishedError) {
reject(new ClientResponseError(data.record.result))
return
}
},
{ initialFetch: false, pollIntervalMs: 100 }
)
dbg(`Creating ${rpcIn.id}`)
const newRpc = await client
.collection(RPC_COLLECTION)
.create(rpcIn)
dbg(`Created ${newRpc.id}`)
} catch (e) {
reject(e)
}
}
).finally(async () => {
dbg(`Unwatching ${rpcIn.id}`)
await unsub?.()
})
}
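
`mkRpc` now takes an Ajv `JSONSchemaType` for the payload and validates it client-side before creating the `rpc` record, then watches that record (polling every 100ms as a realtime fallback) until it reaches a finished status. A sketch of how the new save-secrets command could be wired up, using the schema added later in this commit; the helper name and exact wiring are illustrative:

// Inside createRpcHelper(), alongside the other RPC helpers:
const saveSecrets = mkRpc<SaveSecretsPayload, SaveSecretsResult>(
  RpcCommands.SaveSecrets,
  SaveSecretsPayloadSchema
)

// From calling code, once the helper is exposed by createRpcHelper(): invalid
// payloads are rejected locally by the compiled Ajv validator; valid ones are
// written to the `rpc` collection and the promise resolves when the daemon
// marks the record FinishedSuccess (or rejects on FinishedError).
await saveSecrets(
  { instanceId: `abc123def456ghi`, secrets: { SOME_API_KEY: `value` } },
  (sub) => console.log(`rpc status:`, sub.record.status)
)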

View File

@ -1,49 +1,91 @@
import type { BaseFields, RecordId } from '@pockethost/common'
import { logger, safeCatch } from '@pockethost/common'
import type pocketbaseEs from 'pocketbase'
import type { RecordSubscription, UnsubscribeFunc } from 'pocketbase'
import { logger } from '../Logger'
import { safeCatch } from '../safeCatch'
import { BaseFields, RecordId } from '../schema'
import { createTimerManager, UnixTimestampMs } from '../TimerManager'
export type WatchHelperConfig = {
client: pocketbaseEs
}
export type WatchConfig = {
initialFetch: boolean
pollIntervalMs: UnixTimestampMs
}
export type WatchHelper = ReturnType<typeof createWatchHelper>
export const createWatchHelper = (config: WatchHelperConfig) => {
const { client } = config
const { dbg } = logger().create(`watchHelper`)
const watchById = safeCatch(
`watchById`,
async <TRec>(
collectionName: string,
id: RecordId,
cb: (data: RecordSubscription<TRec>) => void,
initialFetch = true
options?: Partial<WatchConfig>
): Promise<UnsubscribeFunc> => {
const { dbg } = logger().create(`watchById:${collectionName}:${id}`)
const config: WatchConfig = {
initialFetch: true,
pollIntervalMs: 0,
...options,
}
const { initialFetch, pollIntervalMs } = config
const tm = createTimerManager({})
dbg(`watching ${collectionName}:${id}`)
let pollId: ReturnType<typeof setTimeout> | undefined
const _checkValue = async () => {
if (hasFinished) return
dbg(`Checking ${id} by polling`)
try {
const rec = await client.collection(collectionName).getOne<TRec>(id)
if (hasFinished) return
dbg(`Got an update polling ${collectionName}:${id}`)
cb({ action: 'poll', record: rec })
} catch (e) {
dbg(`Failed to poll at interval`, e)
} finally {
pollId = setTimeout(_checkValue, pollIntervalMs)
}
}
let hasUpdate = false
let hasFinished = false
if (pollIntervalMs) {
dbg(`Configuring polling for ${pollIntervalMs}ms`)
setTimeout(_checkValue, pollIntervalMs)
}
const unsub = await client
.collection(collectionName)
.subscribe<TRec>(id, (e) => {
hasUpdate = true
dbg(`Got an update watching ${collectionName}:${id}`, e)
clearTimeout(pollId)
if (pollIntervalMs) {
pollId = setTimeout(_checkValue, pollIntervalMs)
}
cb(e)
})
if (initialFetch) {
const initial = await client.collection(collectionName).getOne<TRec>(id)
if (!initial) {
try {
const initial = await client
.collection(collectionName)
.getOne<TRec>(id)
if (!hasUpdate && !hasFinished) {
// No update has been sent yet, send at least one
dbg(`Sending initial update for ${collectionName}:${id}`, initial)
cb({ action: 'initial', record: initial })
}
} catch (e) {
throw new Error(`Expected ${collectionName}.${id} to exist.`)
}
if (!hasUpdate) {
// No update has been sent yet, send at least one
dbg(`Sending initial update for ${collectionName}:${id}`, initial)
cb({ action: 'initial', record: initial })
}
}
return async () => {
dbg(`UNsubbing ${collectionName}:${id}`)
hasFinished = true
await unsub()
}
}
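
`watchById` now takes an options object instead of a bare `initialFetch` flag: `initialFetch` (default true) controls whether one 'initial' record is emitted up front, and `pollIntervalMs` (default 0, disabled) adds a polling fallback alongside the realtime subscription. A calling sketch, assuming `createWatchHelper` returns `watchById` and a PocketBase client is already available; collection and id are illustrative:

const { watchById } = createWatchHelper({ client })

const unsubscribe = await watchById<InstanceFields>(
  `instances`,
  `abc123def456ghi`,
  (sub) => {
    // action is 'initial' (first fetch), 'poll' (interval fallback), or a
    // realtime action forwarded from PocketBase's subscribe()
    console.log(sub.action, sub.record.id)
  },
  { initialFetch: true, pollIntervalMs: 250 }
)

// Later: stops both the realtime subscription and the polling loop.
await unsubscribe()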

View File

@ -12,6 +12,12 @@ export enum InstanceStatus {
Failed = 'failed',
}
export type InstanceSecretKey = string
export type InstanceSecretValue = string
export type InstanceSecretCollection = {
[name: InstanceSecretKey]: InstanceSecretValue
}
export type InstanceFields = BaseFields & {
subdomain: Subdomain
uid: UserId
@ -20,6 +26,7 @@ export type InstanceFields = BaseFields & {
version: VersionId
secondsThisMonth: Seconds
isBackupAllowed: boolean
secrets: InstanceSecretCollection | null
}
export type InstanceFields_Create = Omit<InstanceFields, keyof BaseFields>

View File

@ -0,0 +1,16 @@
import { BaseFields } from './types'
export enum StreamNames {
Info = 'info',
Warning = 'warning',
Debug = 'debug',
Error = 'error',
System = 'system',
}
export type InstanceLogFields = BaseFields & {
message: string
stream: StreamNames
}
export type InstanceLogFields_Create = InstanceLogFields

View File

@ -0,0 +1,33 @@
import { JSONSchemaType } from 'ajv'
import { InstanceId } from '../types'
export type SaveSecretsPayload = {
instanceId: InstanceId
secrets: {
[_: string]: string
}
}
export type SaveSecretsResult = {
status: 'saved'
}
export const SECRET_KEY_REGEX = /^[A-Z][A-Z0-9_]*$/
export const SaveSecretsPayloadSchema: JSONSchemaType<SaveSecretsPayload> = {
type: 'object',
properties: {
instanceId: { type: 'string' },
secrets: {
type: 'object',
patternProperties: {
[SECRET_KEY_REGEX.source]: {
anyOf: [{ type: 'string' }],
},
},
required: [],
},
},
required: ['instanceId', 'secrets'],
additionalProperties: false,
}
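
Secret names must match `SECRET_KEY_REGEX` (upper-case letters, digits, and underscores, starting with a letter). A quick validation sketch with Ajv; the payload values are illustrative:

import Ajv from 'ajv'
import { SaveSecretsPayloadSchema } from '@pockethost/common'

const validate = new Ajv().compile(SaveSecretsPayloadSchema)

// Keys like DATABASE_URL match ^[A-Z][A-Z0-9_]*$ and pass validation.
console.log(
  validate({
    instanceId: `abc123def456ghi`,
    secrets: { DATABASE_URL: `postgres://localhost/app` },
  })
) // true

// Missing instanceId (or extra top-level properties) fails validation.
console.log(validate({ secrets: {} })) // false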

View File

@ -1,14 +1,55 @@
import Ajv from 'ajv'
import { JsonObject } from 'type-fest'
import { BaseFields, UserId } from '../types'
export const RPC_COLLECTION = 'rpc'
export enum RpcCommands {
CreateInstance = 'create-instance',
BackupInstance = 'backup-instance',
RestoreInstance = 'restore-instance',
SaveSecrets = 'save-secrets',
// gen:enum
}
export const RPC_COMMANDS = [
RpcCommands.BackupInstance,
RpcCommands.CreateInstance,
RpcCommands.BackupInstance,
RpcCommands.RestoreInstance,
RpcCommands.SaveSecrets,
]
export enum RpcStatus {
New = 'new',
Queued = 'queued',
Running = 'running',
Starting = 'starting',
FinishedSuccess = 'finished-success',
FinishedError = 'finished-error',
}
export type RpcPayloadBase = JsonObject
export type RpcFields<
TPayload extends RpcPayloadBase,
TRes extends JsonObject
> = BaseFields & {
userId: UserId
cmd: string
payload: TPayload
status: RpcStatus
message: string
result: TRes
}
export type RpcRecord_Create<TRecord extends RpcFields<any, any>> = Pick<
TRecord,
'id' | 'userId' | 'payload' | 'cmd'
>
export const ajv = new Ajv()
export * from './BackupInstance'
export * from './CreateInstance'
export * from './RestoreInstance'
export * from './SaveSecrets'
// gen:export

View File

@ -1,5 +1,6 @@
export * from './Backup'
export * from './Instance'
export * from './InstanceLog'
export * from './Invocation'
export * from './Rpc'
export * from './types'

View File

@ -30,12 +30,14 @@
"node-fetch": "^3.3.0",
"pocketbase": "^0.8.0",
"semver": "^7.3.8",
"sqlite": "^4.1.2",
"sqlite3": "^5.1.2",
"tmp": "^0.2.1",
"type-fest": "^3.3.0",
"unzipper": "^0.10.11"
"unzipper": "^0.10.11",
"url-pattern": "^1.0.3"
},
"devDependencies": {
"tsx": "^3.11.0"
}
}
}

View File

@ -2,13 +2,9 @@ import { existsSync } from 'fs'
import { join } from 'path'
import { env, envb, envi } from './util/env'
export const PUBLIC_APP_PROTOCOL = env('PUBLIC_APP_PROTOCOL', 'https')
export const PUBLIC_PB_PROTOCOL = env('PUBLIC_PB_PROTOCOL', `https`)
export const PUBLIC_APP_DOMAIN = env('PUBLIC_APP_DOMAIN', `pockethost.test`)
export const PUBLIC_PB_DOMAIN = env('PUBLIC_PB_DOMAIN', `pockethost.test`)
export const PUBLIC_PB_SUBDOMAIN = env(
'PUBLIC_PB_SUBDOMAIN',
`pockethost-central`
)
export const PUBLIC_APP_DB = env('PUBLIC_APP_DB', `pockethost-central`)
export const DAEMON_PB_USERNAME = (() => {
const v = env('DAEMON_PB_USERNAME')
if (!v) {
@ -60,3 +56,5 @@ export const SSL_CERT = env('SSL_CERT')
export const PH_FTP_PASV_IP = env('PH_FTP_PASV_IP', '0.0.0.0')
export const PH_FTP_PASV_PORT_MIN = envi('PH_FTP_PASV_PORT_MIN', 10000)
export const PH_FTP_PASV_PORT_MAX = envi('PH_FTP_PASV_PORT_MAX', 20000)
export const DENO_PATH = env('DENO_PATH', `deno`)

View File

@ -1,9 +1,9 @@
import {
DAEMON_PB_PASSWORD,
DAEMON_PB_USERNAME,
PUBLIC_PB_DOMAIN,
PUBLIC_PB_PROTOCOL,
PUBLIC_PB_SUBDOMAIN,
PUBLIC_APP_DB,
PUBLIC_APP_DOMAIN,
PUBLIC_APP_PROTOCOL,
} from '$constants'
import { createPbClient, pocketbase, PocketbaseClientApi } from '$services'
import { logger, safeCatch } from '@pockethost/common'
@ -19,7 +19,7 @@ export const withInstance = safeCatch(
await pocketbase()
).spawn({
command: 'serve',
slug: PUBLIC_PB_SUBDOMAIN,
slug: PUBLIC_APP_DB,
})
try {
@ -29,7 +29,7 @@ export const withInstance = safeCatch(
await cb(client)
} catch (e) {
error(
`***WARNING*** CANNOT AUTHENTICATE TO ${PUBLIC_PB_PROTOCOL}://${PUBLIC_PB_SUBDOMAIN}.${PUBLIC_PB_DOMAIN}/_/`
`***WARNING*** CANNOT AUTHENTICATE TO ${PUBLIC_APP_PROTOCOL}://${PUBLIC_APP_DB}.${PUBLIC_APP_DOMAIN}/_/`
)
error(`***WARNING*** LOG IN MANUALLY, ADJUST .env, AND RESTART DOCKER`)
} finally {

View File

@ -1,4 +1,4 @@
import { DEBUG, PH_BIN_CACHE, PUBLIC_PB_SUBDOMAIN } from '$constants'
import { DEBUG, PH_BIN_CACHE, PUBLIC_APP_DB } from '$constants'
import {
backupService,
clientService,
@ -6,9 +6,13 @@ import {
instanceService,
pocketbase,
proxyService,
realtimeLog,
rpcService,
sqliteService,
} from '$services'
import { logger } from '@pockethost/common'
import { centralDbService } from './services/CentralDbService'
import { instanceLoggerService } from './services/InstanceLoggerService'
logger({ debug: DEBUG })
@ -28,7 +32,7 @@ global.EventSource = require('eventsource')
*/
const { url } = await pbService.spawn({
command: 'serve',
slug: PUBLIC_PB_SUBDOMAIN,
slug: PUBLIC_APP_DB,
})
/**
@ -37,10 +41,14 @@ global.EventSource = require('eventsource')
await clientService(url)
ftpService({})
await rpcService({})
await instanceService({})
await proxyService({
coreInternalUrl: url,
})
await instanceLoggerService({})
await sqliteService({})
await realtimeLog({})
await instanceService({})
await centralDbService({})
await backupService({})
info(`Hooking into process exit event`)
@ -49,6 +57,7 @@ global.EventSource = require('eventsource')
info(`Got signal ${signal}`)
info(`Shutting down`)
ftpService().shutdown()
;(await realtimeLog()).shutdown()
;(await backupService()).shutdown()
;(await proxyService()).shutdown()
;(await instanceService()).shutdown()

View File

@ -19,7 +19,7 @@ import Bottleneck from 'bottleneck'
export const backupService = mkSingleton(async () => {
const { dbg } = logger().create('BackupService')
const client = await clientService()
const { client } = await clientService()
const { registerCommand } = await rpcService()

View File

@ -0,0 +1,27 @@
import { PUBLIC_APP_DB } from '$constants'
import { logger, mkSingleton } from '@pockethost/common'
import { proxyService } from './ProxyService'
export const centralDbService = mkSingleton(async () => {
const { dbg } = logger().create(`centralDbService`)
;(await proxyService()).use(
PUBLIC_APP_DB,
['/api(/*)', '/_(/*)'],
(req, res, meta) => {
const { subdomain, coreInternalUrl, proxy } = meta
if (subdomain !== PUBLIC_APP_DB) return
const target = coreInternalUrl
dbg(
`Forwarding proxy request for ${req.url} to central instance ${target}`
)
proxy.web(req, res, { target })
}
)
return {
shutdown() {},
}
})

View File

@ -0,0 +1,7 @@
import { mkSingleton } from '@pockethost/common'
export const frontendService = mkSingleton(async () => {
return {
shutdown() {},
}
})

View File

@ -17,7 +17,7 @@ export type FtpConfig = {}
export enum FolderNames {
PbData = 'pb_data',
PbStatic = 'pb_static',
PbWorkers = 'workers',
PbWorker = 'worker',
PbBackup = 'backup',
}
@ -25,7 +25,7 @@ export const README_CONTENTS: { [_ in FolderNames]: string } = {
[FolderNames.PbBackup]: `This directory contains tgz backups of your data. PocketHost creates these automatically, or you can create them manually. For more information, see https://pockethost.io/docs/backups`,
[FolderNames.PbData]: `This directory contains your PocketBase data. For more information, see https://pockethost.io/docs/data`,
[FolderNames.PbStatic]: `This directory contains static files such as your web frontend. PocketHost will serve these when your instance URL receives a request. For more information, see https://pockethost.io/docs/static `,
[FolderNames.PbWorkers]: `This directory contains your Deno workers. For more information, see https://pockethost.io/docs/workers`,
[FolderNames.PbWorker]: `This directory contains your Deno worker. For more information, see https://pockethost.io/docs/workers`,
}
export const README_NAME = 'readme.md'
@ -33,7 +33,7 @@ export const FOLDER_NAMES: FolderNames[] = [
FolderNames.PbBackup,
FolderNames.PbData,
FolderNames.PbStatic,
FolderNames.PbWorkers,
FolderNames.PbWorker,
]
export function isFolder(name: string): name is FolderNames {
@ -62,7 +62,7 @@ export const ftpService = mkSingleton((config: FtpConfig) => {
ftpServer.on(
'login',
async ({ connection, username, password }, resolve, reject) => {
const url = (await clientService()).url
const url = (await clientService()).client.url
const client = createPbClient(url)
try {
await client.client

View File

@ -0,0 +1,118 @@
import { DAEMON_PB_DATA_DIR } from '$constants'
import { SqliteChangeEvent, sqliteService } from '$services'
import {
InstanceId,
InstanceLogFields,
InstanceLogFields_Create,
logger,
mkSingleton,
newId,
pocketNow,
RecordId,
safeCatch,
StreamNames,
} from '@pockethost/common'
import { mkdirSync } from 'fs'
import knex from 'knex'
import { dirname, join } from 'path'
import { AsyncReturnType } from 'type-fest'
export type InstanceLogger = AsyncReturnType<typeof mkApi>
const mkApi = async (logDbPath: string) => {
const { dbg } = logger().create(logDbPath)
const { getDatabase } = sqliteService()
const db = await getDatabase(logDbPath)
const conn = knex({
client: 'sqlite3',
connection: {
filename: logDbPath,
},
})
const write = safeCatch(
`workerLogger:write`,
async (message: string, stream: StreamNames = StreamNames.Info) => {
const _in: InstanceLogFields_Create = {
id: newId(),
message,
stream,
created: pocketNow(),
updated: pocketNow(),
}
const sql = conn('logs').insert(_in).toString()
dbg(`Writing log ${JSON.stringify(_in)} ${sql}`)
await db.exec(sql)
}
)
const subscribe = (cb: (e: SqliteChangeEvent<InstanceLogFields>) => void) => {
let _seenIds: { [_: RecordId]: boolean } | undefined = {}
const unsub = db.subscribe<InstanceLogFields>((e) => {
// dbg(`Caught db modification ${logDbPath}`, e)
const { table, record } = e
if (table !== 'logs') return
if (_seenIds) {
_seenIds[record.id] = true
}
cb(e)
})
return unsub
}
const fetch = async (limit: number = 100) => {
return db.all<InstanceLogFields[]>(
`select * from logs order by created desc limit ${limit}`
)
}
return { write, subscribe, fetch }
}
const instances: {
[instanceId: InstanceId]: Promise<InstanceLogger>
} = {}
export const createInstanceLogger = (instanceId: InstanceId) => {
if (!instances[instanceId]) {
instances[instanceId] = new Promise<InstanceLogger>(async (resolve) => {
const { dbg } = logger().create(`WorkerLogger:${instanceId}`)
const logDbPath = join(
DAEMON_PB_DATA_DIR,
instanceId,
'pb_data',
'instance_logs.db'
)
dbg(`logs path`, logDbPath)
mkdirSync(dirname(logDbPath), { recursive: true })
dbg(`Running migrations`)
const { getDatabase } = sqliteService()
const db = await getDatabase(logDbPath)
await db.migrate({
migrationsPath: join(__dirname, 'migrations'),
})
const api = await mkApi(logDbPath)
await api.write(`Ran migrations`, StreamNames.System)
resolve(api)
})
}
return instances[instanceId]!
}
export const instanceLoggerService = mkSingleton(() => {
const { dbg } = logger().create(`InstanceLoggerService`)
dbg(`Starting up`)
return {
get: createInstanceLogger,
shutdown() {
dbg(`Shutting down`)
},
}
})
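
Each instance gets its own SQLite-backed `logs` database under `pb_data/instance_logs.db`; the service memoizes one logger per instance and exposes write/subscribe/fetch. A usage sketch, assuming the sqlite service has been started as in the daemon bootstrap above; the instance id is illustrative:

import { StreamNames } from '@pockethost/common'
import { instanceLoggerService } from './services/InstanceLoggerService'

const instanceLogger = await instanceLoggerService().get(`abc123def456ghi`)

// Writes are inserted via knex-generated SQL into the `logs` table.
await instanceLogger.write(`Worker booted`, StreamNames.System)

// Subscribe to live changes on this instance's logs table.
const unsubscribe = instanceLogger.subscribe((e) => {
  console.log(`[${e.record.stream}] ${e.record.message}`)
})

// Fetch the most recent rows (newest first) to prime a history view.
const recent = await instanceLogger.fetch(50)
console.log(`${recent.length} recent log lines`)
unsubscribe()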

View File

@ -0,0 +1,23 @@
--------------------------------------------------------------------------------
-- Up
--------------------------------------------------------------------------------
CREATE TABLE "logs" (
"id" TEXT UNIQUE,
"created" TEXT NOT NULL,
"updated" TEXT NOT NULL,
"message" TEXT NOT NULL,
"stream" TEXT NOT NULL,
PRIMARY KEY("id")
);
CREATE INDEX "updated" ON "logs" (
"updated" DESC
);
--------------------------------------------------------------------------------
-- Down
--------------------------------------------------------------------------------
DROP INDEX "updated";
DROP TABLE "logs";

View File

@ -0,0 +1,93 @@
import { DAEMON_PB_DATA_DIR, DENO_PATH } from '$constants'
import { InstanceFields, logger, StreamNames } from '@pockethost/common'
import { keys } from '@s-libs/micro-dash'
import { spawn } from 'child_process'
import { join } from 'path'
import { AsyncReturnType } from 'type-fest'
import { mkInternalAddress, mkInternalUrl } from '../../../util/internal'
import { instanceLoggerService } from '../../InstanceLoggerService'
export type DenoProcessConfig = {
path: string
port: number
instance: InstanceFields
}
export type DenoApi = AsyncReturnType<typeof createDenoProcess>
export const createDenoProcess = async (config: DenoProcessConfig) => {
const { dbg, error } = logger().create(`DenoProcess.ts`)
const { instance, port, path } = config
const internalUrl = mkInternalUrl(port)
const instanceAddress = mkInternalAddress(port)
const secrets = instance.secrets || {}
const cmd = DENO_PATH
// deno index.ts
const allowEnv = ['POCKETBASE_URL', ...keys(instance.secrets)].join(',')
const args = [
`run`,
`--allow-env=${allowEnv}`,
`--allow-net=${instanceAddress}`,
path,
]
const denoLogger = await instanceLoggerService().get(instance.id)
const denoWrite = (
message: string,
stream: StreamNames = StreamNames.Info
) => {
dbg(`[${instance.id}:${path}:${stream}] ${message}`)
return denoLogger.write(message, stream)
}
const env = {
/**
* MAJOR SECURITY WARNING. DO NOT PASS process.env OR THE INSTANCE WILL
* GET FULL ADMIN CONTROL
*/
POCKETBASE_URL: internalUrl,
...secrets,
NO_COLOR: '1',
}
denoWrite(`Worker starting`, StreamNames.System)
dbg(`Worker starting`, cmd, args, env)
const denoProcess = spawn(cmd, args, { env })
const filter = (buf: Buffer) => {
return buf
.toString()
.replaceAll(join(DAEMON_PB_DATA_DIR, instance.id), instance.subdomain)
}
denoProcess.stderr.on('data', (buf: Buffer) => {
denoWrite(filter(buf), StreamNames.Error)
})
denoProcess.stdout.on('data', (buf: Buffer) => {
denoWrite(filter(buf))
})
const shutdownSignal = new Promise<void>((resolve) => {
denoProcess.on('exit', async (code, signal) => {
if (code) {
await denoWrite(
`Unexpected 'deno' exit code: ${code}.`,
StreamNames.Error
)
} else {
await denoWrite(
`Worker has exited with code ${code}`,
StreamNames.System
)
}
resolve()
})
})
return {
shutdown: async () => {
dbg(`Shutting down Deno`)
denoProcess.kill()
await shutdownSignal
},
}
}
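
The worker is launched with `deno run --allow-env=POCKETBASE_URL,<secret names> --allow-net=<instance address>` against `worker/index.ts`, with the instance URL and its secrets injected as environment variables and stdout/stderr piped into the instance log. A sketch of what an instance's `worker/index.ts` might look like under those permissions; the secret name and the use of PocketBase's health endpoint are illustrative:

// worker/index.ts — runs under Deno with access restricted to POCKETBASE_URL,
// the instance's declared secrets, and the instance's own network address.
const pocketbaseUrl = Deno.env.get('POCKETBASE_URL')
const apiKey = Deno.env.get('SOME_API_KEY') // hypothetical secret name

console.log(`Worker starting against ${pocketbaseUrl}`) // lands in the info stream
console.log(`secret configured: ${apiKey ? 'yes' : 'no'}`)

// Illustrative loop: ping the instance's PocketBase every 30 seconds.
setInterval(async () => {
  try {
    const res = await fetch(`${pocketbaseUrl}/api/health`)
    console.log(`PocketBase health check: ${res.status}`)
  } catch (e) {
    console.error(`health check failed: ${e}`) // lands in the error stream
  }
}, 30_000)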

View File

@ -1,10 +1,12 @@
import {
DAEMON_PB_DATA_DIR,
DAEMON_PB_IDLE_TTL,
DAEMON_PB_PORT_BASE,
PUBLIC_APP_DB,
PUBLIC_APP_DOMAIN,
PUBLIC_APP_PROTOCOL,
} from '$constants'
import { clientService, rpcService } from '$services'
import { clientService, proxyService, rpcService } from '$services'
import { mkInternalUrl, now } from '$util'
import {
assertTruthy,
@ -18,12 +20,19 @@ import {
mkSingleton,
RpcCommands,
safeCatch,
SaveSecretsPayload,
SaveSecretsPayloadSchema,
SaveSecretsResult,
} from '@pockethost/common'
import { forEachRight, map } from '@s-libs/micro-dash'
import Bottleneck from 'bottleneck'
import { existsSync } from 'fs'
import getPort from 'get-port'
import { join } from 'path'
import { AsyncReturnType } from 'type-fest'
import { instanceLoggerService } from '../InstanceLoggerService'
import { pocketbase, PocketbaseProcess } from '../PocketBaseService'
import { createDenoProcess } from './Deno/DenoProcess'
type InstanceApi = {
process: PocketbaseProcess
@ -39,7 +48,7 @@ export type InstanceServiceApi = AsyncReturnType<typeof instanceService>
export const instanceService = mkSingleton(
async (config: InstanceServiceConfig) => {
const { dbg, raw, error, warn } = logger().create('InstanceService')
const client = await clientService()
const { client } = await clientService()
const { registerCommand } = await rpcService()
@ -59,35 +68,48 @@ export const instanceService = mkSingleton(
platform: 'unused',
secondsThisMonth: 0,
isBackupAllowed: false,
secrets: {},
})
return { instance }
}
)
registerCommand<SaveSecretsPayload, SaveSecretsResult>(
RpcCommands.SaveSecrets,
SaveSecretsPayloadSchema,
async (job) => {
const { payload } = job
const { instanceId, secrets } = payload
await client.updateInstance(instanceId, { secrets })
return { status: 'saved' }
}
)
const instances: { [_: string]: InstanceApi } = {}
const instanceLimiter = new Bottleneck({ maxConcurrent: 1 })
const getInstance = (subdomain: string) =>
instanceLimiter.schedule(async () => {
// dbg(`Getting instance ${subdomain}`)
const { dbg, warn, error } = logger().create(subdomain)
dbg(`Getting instance`)
{
const instance = instances[subdomain]
if (instance) {
// dbg(`Found in cache: ${subdomain}`)
dbg(`Found in cache`)
return instance
}
}
const clientLimiter = new Bottleneck({ maxConcurrent: 1 })
dbg(`Checking ${subdomain} for permission`)
dbg(`Checking for permission`)
const [instance, owner] = await clientLimiter.schedule(() =>
client.getInstanceBySubdomain(subdomain)
)
if (!instance) {
dbg(`${subdomain} not found`)
dbg(`Instance not found`)
return
}
@ -100,7 +122,7 @@ export const instanceService = mkSingleton(
await clientLimiter.schedule(() =>
client.updateInstanceStatus(instance.id, InstanceStatus.Port)
)
dbg(`${subdomain} found in DB`)
dbg(`Instance found`)
const exclude = map(instances, (i) => i.port)
const newPort = await getPort({
port: DAEMON_PB_PORT_BASE,
@ -109,12 +131,20 @@ export const instanceService = mkSingleton(
error(`Failed to get port for ${subdomain}`)
throw e
})
dbg(`Found port for ${subdomain}: ${newPort}`)
dbg(`Found port: ${newPort}`)
await clientLimiter.schedule(() =>
client.updateInstanceStatus(instance.id, InstanceStatus.Starting)
)
const instanceLogger = await instanceLoggerService().get(instance.id)
await clientLimiter.schedule(() => {
dbg(`Instance status: starting`)
return client.updateInstanceStatus(
instance.id,
InstanceStatus.Starting
)
})
dbg(`Starting instance`)
await instanceLogger.write(`Starting instance`)
const childProcess = await pbService.spawn({
command: 'serve',
slug: instance.id,
@ -128,14 +158,44 @@ export const instanceService = mkSingleton(
const { pid } = childProcess
assertTruthy(pid, `Expected PID here but got ${pid}`)
dbg(`PocketBase instance PID: ${pid}`)
if (!instance.isBackupAllowed) {
await client.updateInstance(instance.id, { isBackupAllowed: true })
dbg(`Backups are now allowed`)
await clientLimiter.schedule(() =>
client.updateInstance(instance.id, { isBackupAllowed: true })
)
}
const invocation = await clientLimiter.schedule(() =>
client.createInvocation(instance, pid)
)
/**
* Deno worker
*/
const denoApi = await (async () => {
const workerPath = join(
DAEMON_PB_DATA_DIR,
instance.id,
`worker`,
`index.ts`
)
dbg(`Checking ${workerPath} for a worker entry point`)
if (existsSync(workerPath)) {
dbg(`Found worker ${workerPath}`)
await instanceLogger.write(`Starting worker`)
const api = await createDenoProcess({
path: workerPath,
port: newPort,
instance,
})
return api
} else {
dbg(`No worker found at ${workerPath}`)
}
})()
const tm = createTimerManager({})
const api: InstanceApi = (() => {
let openRequestCount = 0
@ -150,6 +210,10 @@ export const instanceService = mkSingleton(
shutdown: safeCatch(
`Instance ${subdomain} invocation ${invocation.id} pid ${pid} shutdown`,
async () => {
dbg(`Shutting down`)
await instanceLogger.write(`Shutting down instance`)
await instanceLogger.write(`Shutting down worker`)
await denoApi?.shutdown()
tm.shutdown()
await clientLimiter.schedule(() =>
client.finalizeInvocation(invocation)
@ -239,6 +303,36 @@ export const instanceService = mkSingleton(
})
}
;(await proxyService()).use(
(subdomain) => subdomain !== PUBLIC_APP_DB,
['/api(/*)', '/_(/*)'],
async (req, res, meta) => {
const { subdomain, host, proxy } = meta
// Do not handle central db requests, that is handled separately
if (subdomain === PUBLIC_APP_DB) return
const instance = await getInstance(subdomain)
if (!instance) {
throw new Error(
`${host} not found. Please check the instance URL and try again, or create one at ${PUBLIC_APP_PROTOCOL}://${PUBLIC_APP_DOMAIN}.`
)
}
if (req.closed) {
throw new Error(`Request already closed.`)
}
dbg(
`Forwarding proxy request for ${req.url} to instance ${instance.internalUrl}`
)
const endRequest = instance.startRequest()
res.on('close', endRequest)
proxy.web(req, res, { target: instance.internalUrl })
}
)
const maintenance = async (instanceId: InstanceId) => {}
return { getInstance, shutdown, maintenance }
}

View File

@ -1,28 +1,38 @@
import {
PUBLIC_APP_DOMAIN,
PUBLIC_APP_PROTOCOL,
PUBLIC_PB_SUBDOMAIN,
} from '$constants'
import { instanceService } from '$services'
import { logger, mkSingleton } from '@pockethost/common'
import { createServer } from 'http'
import httpProxy from 'http-proxy'
import { isFunction } from '@s-libs/micro-dash'
import {
createServer,
IncomingMessage,
RequestListener,
ServerResponse,
} from 'http'
import { default as httpProxy, default as Server } from 'http-proxy'
import { AsyncReturnType } from 'type-fest'
import UrlPattern from 'url-pattern'
export type ProxyServiceApi = AsyncReturnType<typeof proxyService>
export type ProxyMiddleware = (
req: IncomingMessage,
res: ServerResponse<IncomingMessage>,
meta: {
subdomain: string
coreInternalUrl: string
proxy: Server
host: string
}
) => void
export type ProxyServiceConfig = {
coreInternalUrl: string
}
export const proxyService = mkSingleton(async (config: ProxyServiceConfig) => {
const { dbg, error, info } = logger().create('ProxyService')
const { dbg, error, info, trace } = logger().create('ProxyService')
const { coreInternalUrl } = config
const proxy = httpProxy.createProxyServer({})
const { getInstance } = await instanceService()
const server = createServer(async (req, res) => {
dbg(`Incoming request ${req.headers.host}/${req.url}`)
@ -34,40 +44,8 @@ export const proxyService = mkSingleton(async (config: ProxyServiceConfig) => {
res.end(msg)
}
const host = req.headers.host
if (!host) {
throw new Error(`Host not found`)
}
const [subdomain, ...junk] = host.split('.')
if (!subdomain) {
throw new Error(`${host} has no subdomain.`)
}
try {
if (subdomain === PUBLIC_PB_SUBDOMAIN) {
const target = coreInternalUrl
dbg(`Forwarding proxy request for ${req.url} to instance ${target}`)
proxy.web(req, res, { target })
return
}
const instance = await getInstance(subdomain)
if (!instance) {
throw new Error(
`${host} not found. Please check the instance URL and try again, or create one at ${PUBLIC_APP_PROTOCOL}://${PUBLIC_APP_DOMAIN}.`
)
}
if (req.closed) {
throw new Error(`Request already closed.`)
}
dbg(
`Forwarding proxy request for ${req.url} to instance ${instance.internalUrl}`
)
const endRequest = instance.startRequest()
res.on('close', endRequest)
proxy.web(req, res, { target: instance.internalUrl })
middleware.forEach((handler) => handler(req, res))
} catch (e) {
die(`${e}`)
return
@ -88,5 +66,57 @@ export const proxyService = mkSingleton(async (config: ProxyServiceConfig) => {
})
}
return { shutdown }
const middleware: RequestListener[] = []
const use = (
subdomainFilter: string | ((subdomain: string) => boolean),
urlFilters: string | string[],
handler: ProxyMiddleware
) => {
const _urlFilters = Array.isArray(urlFilters)
? urlFilters.map((f) => new UrlPattern(f))
: [new UrlPattern(urlFilters)]
const _subdomainFilter = isFunction(subdomainFilter)
? subdomainFilter
: (subdomain: string) =>
subdomainFilter === '*' || subdomain === subdomainFilter
middleware.push((req, res) => {
const host = req.headers.host
if (!host) {
throw new Error(`Host not found`)
}
const [subdomain, ...junk] = host.split('.')
if (!subdomain) {
throw new Error(`${host} has no subdomain.`)
}
const { url } = req
if (!url) {
throw new Error(`Expected URL here`)
}
trace({ subdomainFilter, _urlFilters, host, url })
if (!_subdomainFilter(subdomain)) {
trace(`Subdomain ${subdomain} does not match filter ${subdomainFilter}`)
return
}
if (
!_urlFilters.find((u) => {
const isMatch = !!u.match(url)
if (isMatch) {
trace(`Matched ${url}`)
} else {
trace(`No match for ${url}`)
}
return isMatch
})
) {
trace(`${url} does not match pattern ${urlFilters}`)
return
}
dbg(`${url} matches ${urlFilters}, sending to handler`)
handler(req, res, { host, subdomain, coreInternalUrl, proxy })
})
}
return { shutdown, use }
})
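
Routing is now middleware-based: services call `use(subdomainFilter, urlFilters, handler)` and the proxy runs every registered handler for each request, with each handler filtering on subdomain and url-pattern before proxying. A registration sketch (the route itself is illustrative, not something this commit adds):

import { logger } from '@pockethost/common'
import { proxyService } from '$services'

const { dbg } = logger().create(`pingMiddleware`)

;(await proxyService()).use(
  '*',            // match any subdomain; a predicate function also works
  ['/ping(/*)'],  // url-pattern strings, as used by the other services above
  (req, res, meta) => {
    const { subdomain, coreInternalUrl, proxy } = meta
    dbg(`ping from ${subdomain}, proxying ${req.url} to ${coreInternalUrl}`)
    proxy.web(req, res, { target: coreInternalUrl })
  }
)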

View File

@ -0,0 +1,185 @@
import { PUBLIC_APP_DB } from '$src/constants'
import {
InstanceFields,
logger,
mkSingleton,
RecordId,
} from '@pockethost/common'
import Bottleneck from 'bottleneck'
import { text } from 'node:stream/consumers'
import pocketbaseEs from 'pocketbase'
import { JsonifiableObject } from 'type-fest/source/jsonifiable'
import { instanceLoggerService } from './InstanceLoggerService'
import { proxyService } from './ProxyService'
export type RealtimeLogConfig = {}
const mkEvent = (name: string, data: JsonifiableObject) => {
return `event: ${name}\ndata: ${JSON.stringify(data)}\n\n`
}
export type RealtimeLog = ReturnType<typeof realtimeLog>
export const realtimeLog = mkSingleton(async (config: RealtimeLogConfig) => {
const { dbg, error } = logger().create(`RealtimeLog.ts`)
;(await proxyService()).use(
PUBLIC_APP_DB,
'/logs',
async (req, res, meta) => {
const { subdomain, host, coreInternalUrl } = meta
if (!req.url?.startsWith('/logs')) {
return
}
const { dbg, error, trace } = logger().create(
`RealtimeLog:${subdomain}:${host}`
)
const write = async (data: any) => {
return new Promise<void>((resolve) => {
if (!res.write(data)) {
// dbg(`Waiting for drain after`, data)
res.once('drain', resolve)
} else {
// dbg(`Waiting for nexttick`, data)
process.nextTick(resolve)
}
})
}
/**
* Extract query params
*/
dbg(`Got a log request`)
const parsed = new URL(req.url, `https://${req.headers.host}`)
if (req.method === 'OPTIONS') {
// https://developer.mozilla.org/en-US/docs/Glossary/Preflight_request
res.setHeader('Access-Control-Allow-Origin', '*')
res.setHeader('Access-Control-Allow-Methods', 'POST, OPTIONS')
res.setHeader('Access-Control-Allow-Headers', 'content-type')
res.setHeader('Access-Control-Max-Age', 86400)
res.statusCode = 204
res.end()
return
}
// dbg(`Parsed URL is`, parsed)
const json = await text(req)
dbg(`JSON payload is`, json)
const payload = JSON.parse(json)
dbg(`Parsed payload is`, payload)
const { instanceId, auth, n: nInitialRecords } = payload
if (!instanceId) {
throw new Error(`instanceId query param required in ${req.url}`)
}
if (!auth) {
throw new Error(`Expected 'auth' query param, but found ${req.url}`)
}
/**
* Validate auth token
*/
const client = new pocketbaseEs(coreInternalUrl)
client.authStore.loadFromCookie(auth)
dbg(`Cookie here is`, client.authStore.isValid)
await client.collection('users').authRefresh()
if (!client.authStore.isValid) {
throw new Error(`Cookie is invalid here`)
}
const user = client.authStore.model
if (!user) {
throw new Error(`Valid user expected here`)
}
dbg(`Cookie auth passed`)
/**
* Validate instance and ownership
*/
dbg(`Got a log request for instance ID ${instanceId}`)
const instance = await client
.collection('instances')
.getOne<InstanceFields>(instanceId)
if (!instance) {
throw new Error(
`instanceId ${instanceId} not found for user ${user.id}`
)
}
dbg(`Instance is `, instance)
const limiter = new Bottleneck({ maxConcurrent: 1 })
/**
* Get a database connection
*/
const instanceLogger = await instanceLoggerService().get(instanceId)
const { subscribe } = instanceLogger
/**
* Start the stream
*/
res.writeHead(200, {
'Content-Type': 'text/event-stream; charset=UTF-8',
Connection: 'keep-alive',
'Cache-Control': 'no-store',
})
/**
* Track the IDs we send so we don't accidentally send old
* records in the initial burst (if one is requested)
*/
let _seenIds: { [_: RecordId]: boolean } | undefined = {}
const unsub = await subscribe((e) => {
trace(`Caught db modification ${instanceId}`, e)
const { table, record } = e
const evt = mkEvent(`log`, record)
trace(
`Dispatching SSE log event from ${instance.subdomain} (${instance.id})`,
evt
)
limiter.schedule(() => write(evt))
})
req.on('close', () => {
limiter.stop()
dbg(
`SSE request for ${instance.subdomain} (${instance.id}) closed. Unsubscribing.`
)
unsub()
})
/**
* Send initial batch if requested
*/
if (nInitialRecords > 0) {
dbg(`Fetching initial ${nInitialRecords} logs to prime history`)
const recs = await instanceLogger.fetch(nInitialRecords)
recs
.sort((a, b) => (a.created < b.created ? -1 : 1))
.forEach((rec) => {
limiter.schedule(async () => {
if (_seenIds?.[rec.id]) return // Skip if update already emitted
const evt = mkEvent(`log`, rec)
// dbg(
// `Dispatching SSE initial log event from ${instance.subdomain} (${instance.id})`,
// evt
// )
return write(evt)
})
})
limiter.schedule(async () => {
// Set seenIds to `undefined` so the subscribe listener stops tracking them.
_seenIds = undefined
})
}
return true
}
)
return {
shutdown() {
dbg(`shutdown`)
},
}
})
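
The `/logs` route on the central subdomain accepts a JSON POST with `instanceId`, an `auth` cookie string (validated against the `users` collection), and an optional `n` count of historical records, then answers with a server-sent-event stream of `log` events. A client-side sketch, assuming the default `.env` values above and the PocketBase JS SDK's cookie export:

import PocketBase from 'pocketbase'

const pb = new PocketBase(`https://pockethost-central.pockethost.test`)
// ...assume pb.collection('users').authWithPassword(...) has already succeeded.

const res = await fetch(`https://pockethost-central.pockethost.test/logs`, {
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({
    instanceId: `abc123def456ghi`,       // instance whose logs to stream (illustrative id)
    auth: pb.authStore.exportToCookie(), // checked server-side via loadFromCookie()
    n: 100,                              // prime the stream with the latest 100 records
  }),
})

// Each frame is `event: log` followed by an InstanceLogFields record as JSON.
const reader = res.body!.pipeThrough(new TextDecoderStream()).getReader()
for (;;) {
  const { done, value } = await reader.read()
  if (done) break
  console.log(value)
}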

View File

@ -33,7 +33,7 @@ export type RpcServiceConfig = {}
export const rpcService = mkSingleton(async (config: RpcServiceConfig) => {
const { dbg, error } = logger().create('RpcService')
const client = await clientService()
const { client } = await clientService()
const limiter = new Bottleneck({ maxConcurrent: 1 })
@ -84,6 +84,7 @@ export const rpcService = mkSingleton(async (config: RpcServiceConfig) => {
if (!(e instanceof Error)) {
throw new Error(`Expected Error here but got ${typeof e}:${e}`)
}
dbg(`RPC failed with`, e)
await client.rejectRpc(rpc, e).catch((e) => {
error(`rpc ${rpc.id} failed to reject with ${e}`)
})

View File

@ -0,0 +1,108 @@
import {
createCleanupManager,
createEvent,
logger,
mkSingleton,
} from '@pockethost/common'
import Bottleneck from 'bottleneck'
import { Database as SqliteDatabase, open } from 'sqlite'
import { Database } from 'sqlite3'
import { JsonObject } from 'type-fest'
export type SqliteUnsubscribe = () => void
export type SqliteChangeHandler<TRecord extends JsonObject> = (
e: SqliteChangeEvent<TRecord>
) => void
export type SqliteEventType = 'update' | 'insert' | 'delete'
export type SqliteChangeEvent<TRecord extends JsonObject> = {
table: string
action: SqliteEventType
record: TRecord
}
export type SqliteServiceApi = {
all: SqliteDatabase['all']
get: SqliteDatabase['get']
migrate: SqliteDatabase['migrate']
exec: SqliteDatabase['exec']
subscribe: <TRecord extends JsonObject>(
cb: SqliteChangeHandler<TRecord>
) => SqliteUnsubscribe
}
export type SqliteServiceConfig = {}
export type SqliteService = ReturnType<typeof sqliteService>
export const sqliteService = mkSingleton((config: SqliteServiceConfig) => {
const { dbg, trace } = logger().create(`sqliteService`)
const connections: { [_: string]: Promise<SqliteServiceApi> } = {}
const cm = createCleanupManager()
const limiter = new Bottleneck({ maxConcurrent: 1 })
const getDatabase = async (filename: string): Promise<SqliteServiceApi> => {
const { dbg } = logger().create(`sqliteService:${filename}`)
trace(`Fetching database for ${filename}`, connections)
if (!connections[filename]) {
dbg(`${filename} is not yet opened`)
connections[filename] = new Promise<SqliteServiceApi>(async (resolve) => {
const db = await open({ filename, driver: Database })
dbg(`Database opened`)
db.db.addListener(
'change',
async (
eventType: SqliteEventType,
database: string,
table: string,
rowId: number
) => {
dbg(`Got a raw change event`, { eventType, database, table, rowId })
if (eventType === 'delete') return // Not supported
await limiter.schedule(async () => {
const record = await db.get(
`select * from ${table} where rowid = '${rowId}'`
)
const e: SqliteChangeEvent<any> = {
table,
action: eventType,
record,
}
fireChange(e)
})
}
)
cm.add(() => {
dbg(`Closing connection`)
db.db.removeAllListeners()
db.close()
})
db.migrate
const [onChange, fireChange] = createEvent<SqliteChangeEvent<any>>()
const api: SqliteServiceApi = {
all: db.all.bind(db),
get: db.get.bind(db),
migrate: db.migrate.bind(db),
exec: db.exec.bind(db),
subscribe: onChange,
}
resolve(api)
})
}
return connections[filename]!
}
const shutdown = async () => {
dbg(`Shutting down sqlite service`)
await limiter.stop()
await cm.shutdown()
}
return {
getDatabase,
shutdown,
}
})
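
The sqlite service memoizes one connection per database file, wraps the `sqlite`/`sqlite3` packages, and turns sqlite3's low-level 'change' listener into typed change events (deletes are ignored). A usage sketch, assuming the `logs` table from the migration above already exists; the file path is illustrative:

import { sqliteService } from '$services'

const { getDatabase } = sqliteService({})
const db = await getDatabase(`/tmp/example/instance_logs.db`)

// Watch for inserts/updates on any table in this file.
const unsubscribe = db.subscribe<{ id: string; message: string }>((e) => {
  console.log(`${e.action} on ${e.table}:`, e.record)
})

await db.exec(
  `insert into logs (id, created, updated, message, stream)
   values ('abc', datetime('now'), datetime('now'), 'hello', 'info')`
)

const rows = await db.all(`select * from logs order by created desc limit 10`)
console.log(`${rows.length} rows`)
unsubscribe()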

View File

@ -44,6 +44,26 @@ export const createInstanceMixin = (context: MixinContext) => {
})
)
const getInstanceById = safeCatch(
`getInstanceById`,
async (
instanceId: InstanceId
): Promise<[InstanceFields, UserFields] | []> => {
return client
.collection('instances')
.getOne<InstanceFields>(instanceId)
.then((instance) => {
if (!instance) return []
return client
.collection('users')
.getOne<UserFields>(instance.uid)
.then((user) => {
return [instance, user]
})
})
}
)
const updateInstance = safeCatch(
`updateInstance`,
async (instanceId: InstanceId, fields: Partial<InstanceFields>) => {
@ -114,6 +134,7 @@ export const createInstanceMixin = (context: MixinContext) => {
)
return {
getInstanceById,
getInstances,
updateInstance,
updateInstanceStatus,

View File

@ -1,4 +1,4 @@
import { DAEMON_PB_DATA_DIR, PUBLIC_PB_SUBDOMAIN } from '$constants'
import { DAEMON_PB_DATA_DIR, PUBLIC_APP_DB } from '$constants'
import { logger, safeCatch } from '@pockethost/common'
import { Knex } from 'knex'
import {
@ -22,7 +22,7 @@ export const createPbClient = (url: string) => {
info(`Initializing client: ${url}`)
const rawDb = createRawPbClient(
`${DAEMON_PB_DATA_DIR}/${PUBLIC_PB_SUBDOMAIN}/pb_data/data.db`
`${DAEMON_PB_DATA_DIR}/${PUBLIC_APP_DB}/pb_data/data.db`
)
const client = new PocketBase(url)

View File

@ -1,10 +1,15 @@
import { logger } from '@pockethost/common'
import { existsSync } from 'fs'
import knex from 'knex'
export const createRawPbClient = (filename: string) => {
const { dbg } = logger().create(`rawPbClient`)
dbg(filename)
if (!existsSync(filename)) {
throw new Error(`${filename} not found for direct SQLite connection.`)
}
const conn = knex({
client: 'sqlite3',
connection: {

View File

@ -56,7 +56,7 @@ export const createRpcHelper = (config: RpcHelperConfig) => {
async (rpc: RpcFields<any, any>, err: Error) => {
const fields: Partial<RpcFields<any, any>> = {
status: RpcStatus.FinishedError,
result: JSON.stringify(err),
result: `${err}`,
}
return client
.collection(RPC_COLLECTION)

View File

@ -1,9 +1,9 @@
import {
DAEMON_PB_PASSWORD,
DAEMON_PB_USERNAME,
PUBLIC_PB_DOMAIN,
PUBLIC_PB_PROTOCOL,
PUBLIC_PB_SUBDOMAIN,
PUBLIC_APP_DB,
PUBLIC_APP_DOMAIN,
PUBLIC_APP_PROTOCOL,
} from '$constants'
import { logger, mkSingleton } from '@pockethost/common'
import { createPbClient } from './PbClient'
@ -16,9 +16,14 @@ export const clientService = mkSingleton(async (url: string) => {
dbg(`Logged in`)
} catch (e) {
error(
`***WARNING*** CANNOT AUTHENTICATE TO ${PUBLIC_PB_PROTOCOL}://${PUBLIC_PB_SUBDOMAIN}.${PUBLIC_PB_DOMAIN}/_/`
`***WARNING*** CANNOT AUTHENTICATE TO ${PUBLIC_APP_PROTOCOL}://${PUBLIC_APP_DB}.${PUBLIC_APP_DOMAIN}/_/`
)
error(`***WARNING*** LOG IN MANUALLY, ADJUST .env, AND RESTART DOCKER`)
}
return client
return {
client,
shutdown() {
dbg(`clientService shutdown`)
},
}
})

View File

@ -5,4 +5,6 @@ export * from './FtpService/FtpService'
export * from './InstanceService/InstanceService'
export * from './PocketBaseService'
export * from './ProxyService'
export * from './RpcService'
export * from './RealtimeLog'
export * from './RpcService/RpcService'
export * from './SqliteService/SqliteService'

View File

@ -1,6 +1,6 @@
{
"compilerOptions": {
"lib": [],
"lib": ["ES2021.String"],
"allowJs": true,
"checkJs": true,
"esModuleInterop": true,

View File

@ -0,0 +1,267 @@
/**
* https://raw.githubusercontent.com/MierenManz/EventSource/main/mod.ts
*
* Maintaining until https://github.com/MierenManz/EventSource/issues/32
*/
interface EventSourceInit {
withCredentials: boolean
}
type EventHandler<Evt extends Event> = (e: Evt) => void | Promise<void>
interface Settings {
url: string
fetchSettings: {
headers: string[][]
credentials: 'same-origin' | 'include'
mode: 'cors'
}
reconnectionTime: number
lastEventID: string
}
export class EventSource extends EventTarget {
#withCredentials = false
#readyState: 0 | 1 | 2 = 0
#abortController = new AbortController()
#settings: Settings = {
url: '',
fetchSettings: {
headers: [['Accept', 'text/event-stream']],
credentials: 'same-origin',
mode: 'cors',
},
reconnectionTime: 2200,
lastEventID: '',
}
onopen: EventHandler<Event> | null = null
onmessage: EventHandler<MessageEvent<string>> | null = null
onerror: EventHandler<Event> | null = null
CONNECTING: 0 = 0
OPEN: 1 = 1
CLOSED: 2 = 2
get readyState(): 0 | 1 | 2 {
return this.#readyState
}
get url(): string {
return this.#settings.url
}
get withCredentials(): boolean {
return this.#withCredentials
}
constructor(url: string, eventSourceInitDict?: EventSourceInit) {
super()
try {
// Allow empty url
// https://github.com/web-platform-tests/wpt/blob/master/eventsource/eventsource-constructor-empty-url.any.js
this.#settings.url =
url == ''
? window.location.toString()
: new URL(url, window.location?.href).toString()
} catch (e) {
// Dunno if this is allowed in the spec. But handy for testing purposes
if (e instanceof ReferenceError) {
this.#settings.url = new URL(url).toString()
} else throw new DOMException(e.message, 'SyntaxError')
}
if (eventSourceInitDict?.withCredentials) {
this.#settings.fetchSettings.credentials = 'include'
this.#withCredentials = true
}
this.#fetch()
return
}
close(): void {
this.#readyState = this.CLOSED
this.#abortController.abort()
}
#retry() {
const errorEvent = new Event('error', {
bubbles: false,
cancelable: false,
})
super.dispatchEvent(errorEvent)
this.onerror?.(errorEvent)
setTimeout(this.#fetch, 0)
}
async #fetch(): Promise<void> {
this.#readyState = this.CONNECTING
const res = await fetch(this.url, {
cache: 'no-store',
// This seems to cause problems if the abort happens while `res.body` is being used
// signal: this.#abortController.signal,
keepalive: true,
redirect: 'follow',
...this.#settings.fetchSettings,
}).catch(console.error)
if (
res?.body &&
res?.status === 200 &&
res.headers.get('content-type')?.startsWith('text/event-stream')
) {
// Announce connection
this.#readyState = this.OPEN
const openEvent = new Event('open', {
bubbles: false,
cancelable: false,
})
super.dispatchEvent(openEvent)
this.onopen?.(openEvent)
// Decode body for interpreting
const decoder = new TextDecoderStream('utf-8', {
ignoreBOM: false,
fatal: false,
})
const reader = res.body.pipeThrough(decoder)
// Initiate buffers
let lastEventIDBuffer = ''
let eventTypeBuffer = ''
let messageBuffer = ''
let readBuffer = ''
try {
for await (const chunk of reader) {
if (this.#abortController.signal.aborted) break
const lines = decodeURIComponent(readBuffer + chunk)
.replaceAll('\r\n', '\n')
.replaceAll('\r', '\n')
.split('\n')
readBuffer = lines.pop() ?? ''
// Start loop for interpreting
for (const line of lines) {
if (!line) {
this.#settings.lastEventID = lastEventIDBuffer
// Check if buffer is not an empty string
if (messageBuffer) {
// Create event
if (!eventTypeBuffer) {
eventTypeBuffer = 'message'
}
const event = new MessageEvent<string>(eventTypeBuffer, {
data: messageBuffer.trim(),
origin: res.url,
lastEventId: this.#settings.lastEventID,
cancelable: false,
bubbles: false,
})
if (this.readyState !== this.CLOSED) {
// Fire event
super.dispatchEvent(event)
if (this.onmessage) this.onmessage(event)
}
}
// Clear buffers
messageBuffer = ''
eventTypeBuffer = ''
continue
}
// Ignore comments
if (line[0] === ':') continue
let splitIndex = line.indexOf(':')
splitIndex = splitIndex > 0 ? splitIndex : line.length
const field = line.slice(0, splitIndex).trim()
const data = line.slice(splitIndex + 1).trim()
switch (field) {
case 'event':
// Set fieldBuffer to Field Value
eventTypeBuffer = data
break
case 'data':
// append Field Value to dataBuffer
messageBuffer += `${data}\n`
break
case 'id':
// set lastEventID to Field Value
if (
data &&
!data.includes('\u0000') &&
!data.includes('\x00')
) {
lastEventIDBuffer = data
}
break
case 'retry': {
// set reconnectionTime to Field Value if int
const num = Number(data)
if (!isNaN(num) && isFinite(num)) {
this.#settings.reconnectionTime = num
}
break
}
}
}
}
} catch (e) {
console.error(`caught an error!`, e)
} finally {
// Cancel reader to close the EventSource properly
await reader.cancel().catch(console.error)
this.#readyState = this.CLOSED
}
} else {
// Connection failed for whatever reason
this.#readyState = this.CLOSED
this.#abortController.abort()
const errorEvent = new Event('error', {
bubbles: false,
cancelable: false,
})
super.dispatchEvent(errorEvent)
this.onerror?.(errorEvent)
setTimeout(continuallyConnect, 0)
return
}
// Set readyState to CONNECTING
if (this.#readyState !== this.CLOSED) {
// Fire onerror
const errorEvent = new Event('error', {
bubbles: false,
cancelable: false,
})
super.dispatchEvent(errorEvent)
if (this.onerror) this.onerror(errorEvent)
// Timeout for re-establishing the connection
await new Promise<void>((res) => {
const id = setTimeout(
() => res(clearTimeout(id)),
this.#settings.reconnectionTime
)
})
if (this.#readyState !== this.CONNECTING) break
if (this.#settings.lastEventID) {
this.#settings.fetchSettings.headers.push([
'Last-Event-ID',
this.#settings.lastEventID,
])
}
}
}
}

View File

@ -0,0 +1,571 @@
import * as events from 'https://deno.land/std@0.167.0/node/events.ts'
import * as http from 'https://deno.land/std@0.167.0/node/http.ts'
import * as https from 'https://deno.land/std@0.167.0/node/https.ts'
import { parse } from 'https://deno.land/std@0.167.0/node/url.ts'
import * as util from 'https://deno.land/std@0.167.0/node/util.ts'
var httpsOptions = [
'pfx',
'key',
'passphrase',
'cert',
'ca',
'ciphers',
'rejectUnauthorized',
'secureProtocol',
'servername',
'checkServerIdentity',
]
var bom = [239, 187, 191]
var colon = 58
var space = 32
var lineFeed = 10
var carriageReturn = 13
// Beyond 256KB we could not observe any gain in performance
var maxBufferAheadAllocation = 1024 * 256
// Headers matching the pattern should be removed when redirecting to different origin
var reUnsafeHeader = /^(cookie|authorization)$/i
function hasBom(buf) {
return bom.every(function (charCode, index) {
return buf[index] === charCode
})
}
/**
* Creates a new EventSource object
*
* @param {String} url the URL to which to connect
* @param {Object} [eventSourceInitDict] extra init params. See README for details.
* @api public
**/
function EventSource(url, eventSourceInitDict) {
var readyState = EventSource.CONNECTING
var headers = eventSourceInitDict && eventSourceInitDict.headers
var hasNewOrigin = false
Object.defineProperty(this, 'readyState', {
get: function () {
return readyState
},
})
Object.defineProperty(this, 'url', {
get: function () {
return url
},
})
var self = this
self.reconnectInterval = 1000
self.connectionInProgress = false
function onConnectionClosed(message) {
if (readyState === EventSource.CLOSED) return
readyState = EventSource.CONNECTING
_emit('error', new Event('error', { message: message }))
// The url may have been changed by a temporary redirect. If that's the case,
// revert it now, and flag that we are no longer pointing to a new origin
if (reconnectUrl) {
url = reconnectUrl
reconnectUrl = null
hasNewOrigin = false
}
setTimeout(function () {
if (readyState !== EventSource.CONNECTING || self.connectionInProgress) {
return
}
self.connectionInProgress = true
connect()
}, self.reconnectInterval)
}
var req
var lastEventId = ''
if (headers && headers['Last-Event-ID']) {
lastEventId = headers['Last-Event-ID']
delete headers['Last-Event-ID']
}
var discardTrailingNewline = false
var data = ''
var eventName = ''
var reconnectUrl = null
function connect() {
var options = parse(url)
var isSecure = options.protocol === 'https:'
options.headers = {
'Cache-Control': 'no-cache',
Accept: 'text/event-stream',
}
if (lastEventId) options.headers['Last-Event-ID'] = lastEventId
if (headers) {
var reqHeaders = hasNewOrigin ? removeUnsafeHeaders(headers) : headers
for (var i in reqHeaders) {
var header = reqHeaders[i]
if (header) {
options.headers[i] = header
}
}
}
// Legacy: this should be specified as `eventSourceInitDict.https.rejectUnauthorized`,
// but for now exists as a backwards-compatibility layer
options.rejectUnauthorized = !(
eventSourceInitDict && !eventSourceInitDict.rejectUnauthorized
)
if (
eventSourceInitDict &&
eventSourceInitDict.createConnection !== undefined
) {
options.createConnection = eventSourceInitDict.createConnection
}
// If specify http proxy, make the request to sent to the proxy server,
// and include the original url in path and Host headers
var useProxy = eventSourceInitDict && eventSourceInitDict.proxy
if (useProxy) {
var proxy = parse(eventSourceInitDict.proxy)
isSecure = proxy.protocol === 'https:'
options.protocol = isSecure ? 'https:' : 'http:'
options.path = url
options.headers.Host = options.host
options.hostname = proxy.hostname
options.host = proxy.host
options.port = proxy.port
}
// If https options are specified, merge them into the request options
if (eventSourceInitDict && eventSourceInitDict.https) {
for (var optName in eventSourceInitDict.https) {
if (httpsOptions.indexOf(optName) === -1) {
continue
}
var option = eventSourceInitDict.https[optName]
if (option !== undefined) {
options[optName] = option
}
}
}
// Pass this on to the XHR
if (
eventSourceInitDict &&
eventSourceInitDict.withCredentials !== undefined
) {
options.withCredentials = eventSourceInitDict.withCredentials
}
req = (isSecure ? https : http).request(options, function (res) {
self.connectionInProgress = false
// Handle HTTP errors
if (
res.statusCode === 500 ||
res.statusCode === 502 ||
res.statusCode === 503 ||
res.statusCode === 504
) {
_emit(
'error',
new Event('error', {
status: res.statusCode,
message: res.statusMessage,
})
)
onConnectionClosed()
return
}
// Handle HTTP redirects
if (
res.statusCode === 301 ||
res.statusCode === 302 ||
res.statusCode === 307
) {
var location = res.headers.location
if (!location) {
// Server sent redirect response without Location header.
_emit(
'error',
new Event('error', {
status: res.statusCode,
message: res.statusMessage,
})
)
return
}
var prevOrigin = new URL(url).origin
var nextOrigin = new URL(location).origin
hasNewOrigin = prevOrigin !== nextOrigin
if (res.statusCode === 307) reconnectUrl = url
url = location
process.nextTick(connect)
return
}
if (res.statusCode !== 200) {
_emit(
'error',
new Event('error', {
status: res.statusCode,
message: res.statusMessage,
})
)
return self.close()
}
readyState = EventSource.OPEN
res.on('close', function () {
res.removeAllListeners('close')
res.removeAllListeners('end')
onConnectionClosed()
})
res.on('end', function () {
res.removeAllListeners('close')
res.removeAllListeners('end')
onConnectionClosed()
})
_emit('open', new Event('open'))
// text/event-stream parser adapted from webkit's
// Source/WebCore/page/EventSource.cpp
var buf
var newBuffer
var startingPos = 0
var startingFieldLength = -1
var newBufferSize = 0
var bytesUsed = 0
res.on('data', function (chunk) {
if (!buf) {
buf = chunk
if (hasBom(buf)) {
buf = buf.slice(bom.length)
}
bytesUsed = buf.length
} else {
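          // Grow the buffer when the chunk will not fit: double its size (plus
          // the chunk length), limiting the extra read-ahead headroom to
          // maxBufferAheadAllocation.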
if (chunk.length > buf.length - bytesUsed) {
newBufferSize = buf.length * 2 + chunk.length
if (newBufferSize > maxBufferAheadAllocation) {
newBufferSize =
buf.length + chunk.length + maxBufferAheadAllocation
}
newBuffer = Buffer.alloc(newBufferSize)
buf.copy(newBuffer, 0, 0, bytesUsed)
buf = newBuffer
}
chunk.copy(buf, bytesUsed)
bytesUsed += chunk.length
}
var pos = 0
var length = bytesUsed
while (pos < length) {
if (discardTrailingNewline) {
if (buf[pos] === lineFeed) {
++pos
}
discardTrailingNewline = false
}
var lineLength = -1
var fieldLength = startingFieldLength
var c
for (var i = startingPos; lineLength < 0 && i < length; ++i) {
c = buf[i]
if (c === colon) {
if (fieldLength < 0) {
fieldLength = i - pos
}
} else if (c === carriageReturn) {
discardTrailingNewline = true
lineLength = i - pos
} else if (c === lineFeed) {
lineLength = i - pos
}
}
if (lineLength < 0) {
startingPos = length - pos
startingFieldLength = fieldLength
break
} else {
startingPos = 0
startingFieldLength = -1
}
parseEventStreamLine(buf, pos, fieldLength, lineLength)
pos += lineLength + 1
}
if (pos === length) {
buf = void 0
bytesUsed = 0
} else if (pos > 0) {
buf = buf.slice(pos, bytesUsed)
bytesUsed = buf.length
}
})
})
req.on('error', function (err) {
self.connectionInProgress = false
onConnectionClosed(err.message)
})
if (req.setNoDelay) req.setNoDelay(true)
req.end()
}
connect()
function _emit() {
if (self.listeners(arguments[0]).length > 0) {
self.emit.apply(self, arguments)
}
}
this._close = function () {
if (readyState === EventSource.CLOSED) return
readyState = EventSource.CLOSED
if (req.abort) req.abort()
if (req.xhr && req.xhr.abort) req.xhr.abort()
}
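  // Parses one line of the text/event-stream body. An empty line dispatches the
  // accumulated event; otherwise the line is split at the first colon into a
  // field name ('data', 'event', 'id' or 'retry') and its value.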
function parseEventStreamLine(buf, pos, fieldLength, lineLength) {
if (lineLength === 0) {
if (data.length > 0) {
var type = eventName || 'message'
_emit(
type,
new MessageEvent(type, {
data: data.slice(0, -1), // remove trailing newline
lastEventId: lastEventId,
origin: new URL(url).origin,
})
)
data = ''
}
eventName = void 0
} else if (fieldLength > 0) {
var noValue = fieldLength < 0
var step = 0
var field = buf
.slice(pos, pos + (noValue ? lineLength : fieldLength))
.toString()
if (noValue) {
step = lineLength
} else if (buf[pos + fieldLength + 1] !== space) {
step = fieldLength + 1
} else {
step = fieldLength + 2
}
pos += step
var valueLength = lineLength - step
var value = buf.slice(pos, pos + valueLength).toString()
if (field === 'data') {
data += value + '\n'
} else if (field === 'event') {
eventName = value
} else if (field === 'id') {
lastEventId = value
} else if (field === 'retry') {
var retry = parseInt(value, 10)
if (!Number.isNaN(retry)) {
self.reconnectInterval = retry
}
}
}
}
}
export default EventSource
util.inherits(EventSource, events.EventEmitter)
EventSource.prototype.constructor = EventSource // make stacktraces readable
;['open', 'error', 'message'].forEach(function (method) {
Object.defineProperty(EventSource.prototype, 'on' + method, {
/**
* Returns the current listener
*
* @return {Mixed} the set function or undefined
* @api private
*/
get: function get() {
var listener = this.listeners(method)[0]
return listener
? listener._listener
? listener._listener
: listener
: undefined
},
/**
* Start listening for events
*
* @param {Function} listener the listener
* @return {Mixed} the set function or undefined
* @api private
*/
set: function set(listener) {
this.removeAllListeners(method)
this.addEventListener(method, listener)
},
})
})
/**
* Ready states
*/
Object.defineProperty(EventSource, 'CONNECTING', { enumerable: true, value: 0 })
Object.defineProperty(EventSource, 'OPEN', { enumerable: true, value: 1 })
Object.defineProperty(EventSource, 'CLOSED', { enumerable: true, value: 2 })
EventSource.prototype.CONNECTING = 0
EventSource.prototype.OPEN = 1
EventSource.prototype.CLOSED = 2
/**
* Closes the connection, if one is made, and sets the readyState attribute to 2 (closed)
*
* @see https://developer.mozilla.org/en-US/docs/Web/API/EventSource/close
* @api public
*/
EventSource.prototype.close = function () {
this._close()
}
/**
* Emulates the W3C Browser based WebSocket interface using addEventListener.
*
* @param {String} type A string representing the event type to listen out for
* @param {Function} listener callback
* @see https://developer.mozilla.org/en/DOM/element.addEventListener
* @see http://dev.w3.org/html5/websockets/#the-websocket-interface
* @api public
*/
EventSource.prototype.addEventListener = function addEventListener(
type,
listener
) {
if (typeof listener === 'function') {
// store a reference so we can return the original function again
listener._listener = listener
this.on(type, listener)
}
}
/**
* Emulates the W3C Browser based WebSocket interface using dispatchEvent.
*
* @param {Event} event An event to be dispatched
* @see https://developer.mozilla.org/en-US/docs/Web/API/EventTarget/dispatchEvent
* @api public
*/
EventSource.prototype.dispatchEvent = function dispatchEvent(event) {
if (!event.type) {
throw new Error('UNSPECIFIED_EVENT_TYPE_ERR')
}
this.emit(event.type, event)
}
/**
* Emulates the W3C Browser based WebSocket interface using removeEventListener.
*
* @param {String} type A string representing the event type to remove
* @param {Function} listener callback
* @see https://developer.mozilla.org/en/DOM/element.removeEventListener
* @see http://dev.w3.org/html5/websockets/#the-websocket-interface
* @api public
*/
EventSource.prototype.removeEventListener = function removeEventListener(
type,
listener
) {
if (typeof listener === 'function') {
listener._listener = undefined
this.removeListener(type, listener)
}
}
/**
* W3C Event
*
* @see http://www.w3.org/TR/DOM-Level-3-Events/#interface-Event
* @api private
*/
function Event(type, optionalProperties) {
Object.defineProperty(this, 'type', {
writable: false,
value: type,
enumerable: true,
})
if (optionalProperties) {
for (var f in optionalProperties) {
if (optionalProperties.hasOwnProperty(f)) {
Object.defineProperty(this, f, {
writable: false,
value: optionalProperties[f],
enumerable: true,
})
}
}
}
}
/**
* W3C MessageEvent
*
* @see http://www.w3.org/TR/webmessaging/#event-definitions
* @api private
*/
function MessageEvent(type, eventInitDict) {
Object.defineProperty(this, 'type', {
writable: false,
value: type,
enumerable: true,
})
for (var f in eventInitDict) {
if (eventInitDict.hasOwnProperty(f)) {
Object.defineProperty(this, f, {
writable: false,
value: eventInitDict[f],
enumerable: true,
})
}
}
}
/**
* Returns a new object of headers that does not include any authorization and cookie headers
*
* @param {Object} headers An object of headers ({[headerName]: headerValue})
* @return {Object} a new object of headers
* @api private
*/
function removeUnsafeHeaders(headers) {
var safe = {}
for (var key in headers) {
if (reUnsafeHeader.test(key)) {
continue
}
safe[key] = headers[key]
}
return safe
}

View File

@ -0,0 +1,88 @@
/**
* The Event interface represents any event which takes place in the DOM; some are user-generated (such as mouse or keyboard events), while others are generated by APIs (such as
* events that indicate an animation has finished running, a video has been paused, and so forth). While events are usually triggered by such "external" sources, they can also be
* triggered programmatically, such as by calling the HTMLElement.click() method of an element, or by defining the event, then sending it to a specified target using
* EventTarget.dispatchEvent(). There are many types of events, some of which use other interfaces based on the main Event interface. Event itself contains the properties and
* methods which are common to all events.
*/
interface Event {
/**
* Returns true or false depending on how event was initialized. True if event goes through its target's ancestors in reverse tree order, and false otherwise.
*/
readonly bubbles: boolean
cancelBubble: boolean
readonly cancelable: boolean
/**
* Returns true or false depending on how event was initialized. True if event invokes listeners past a ShadowRoot node that is the root of its target, and false otherwise.
*/
readonly composed: boolean
readonly defaultPrevented: boolean
readonly eventPhase: number
/**
* Returns true if event was dispatched by the user agent, and
* false otherwise.
*/
readonly isTrusted: boolean
returnValue: boolean
/**
* Returns the event's timestamp as the number of milliseconds measured relative to
* the time origin.
*/
readonly timeStamp: number
/**
* Unauthorized and redirect error status codes (for example 401, 403, 301, 307)
*/
readonly status?: number | undefined
/**
* Returns the type of event, e.g.
* "click", "hashchange", or
* "submit".
*/
readonly type: string
readonly AT_TARGET: number
readonly BUBBLING_PHASE: number
readonly CAPTURING_PHASE: number
readonly NONE: number
composedPath(): any[]
initEvent(type: string, bubbles?: boolean, cancelable?: boolean): void
preventDefault(): void
/**
* Invoking this method prevents event from reaching
* any registered event listeners after the current one finishes running and, when dispatched in a tree, also prevents event from reaching any
* other objects.
*/
stopImmediatePropagation(): void
/**
* When dispatched in a tree, invoking this method prevents event from reaching any objects other than the current object.
*/
stopPropagation(): void
}
interface EventInit {
bubbles?: boolean | undefined
cancelable?: boolean | undefined
composed?: boolean | undefined
}
interface MessageEventInit<T = any> extends EventInit {
data?: T | undefined
lastEventId?: string | undefined
origin?: string | undefined
}
/** The MessageEvent interface represents a message received by a target object. */
interface MessageEvent<T = any> extends Event {
/**
* Returns the data of the message.
*/
readonly data: T
/**
* Returns the last event ID string, for server-sent events.
*/
readonly lastEventId: string
/**
* Returns the origin of the message, for server-sent events and
* cross-document messaging.
*/
readonly origin: string
}

56 packages/deno-worker/index.d.ts vendored Normal file
View File

@ -0,0 +1,56 @@
// Type definitions for eventsource 1.1
// Project: https://github.com/EventSource/eventsource
// Definitions by: Scott Lee Davis <https://github.com/scottleedavis>
// Ali Afroozeh <https://github.com/afroozeh>
// Pedro Gámez <https://github.com/snakedrak>
// Akuukis <https://github.com/Akuukis>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// eventsource uses DOM dependencies which are absent in a browserless environment like Node.js.
// To avoid compiler errors, this monkey patch is used. See more details in:
// - sinon: https://github.com/DefinitelyTyped/DefinitelyTyped/issues/11351
// - rxjs: https://github.com/ReactiveX/rxjs/issues/1986
/// <reference path="./dom-monkeypatch.d.ts" />
declare class EventSource {
static readonly CLOSED: number
static readonly CONNECTING: number
static readonly OPEN: number
constructor(
url: string,
eventSourceInitDict?: EventSource.EventSourceInitDict
)
readonly CLOSED: number
readonly CONNECTING: number
readonly OPEN: number
readonly url: string
readonly readyState: number
readonly withCredentials: boolean
onopen: (evt: MessageEvent) => any
onmessage: (evt: MessageEvent) => any
onerror: (evt: MessageEvent) => any
addEventListener(type: string, listener: (evt: MessageEvent) => void): void
dispatchEvent(evt: Event): boolean
removeEventListener(type: string, listener: (evt: MessageEvent) => void): void
close(): void
}
declare namespace EventSource {
enum ReadyState {
CONNECTING = 0,
OPEN = 1,
CLOSED = 2,
}
interface EventSourceInitDict {
withCredentials?: boolean | undefined
headers?: object | undefined
proxy?: string | undefined
https?: object | undefined
rejectUnauthorized?: boolean | undefined
}
}
export = EventSource

View File

@ -0,0 +1,38 @@
// import { EventSource as EventSourceClass } from 'https://cdn.jsdelivr.net/gh/MierenManz/EventSource@53f3ec9001d1eac19645c2214652a6a7aa3a51cb/mod.ts'
// @deno-types="./index.d.ts"
import EventSourceClass from './EventSource2.js'
// @deno-types="https://cdn.jsdelivr.net/npm/pocketbase/dist/pocketbase.es.d.ts"
import PocketBase from 'https://cdn.jsdelivr.net/npm/pocketbase'
declare global {
// deno-lint-ignore no-var
var EventSource: typeof EventSourceClass
}
globalThis.EventSource = EventSourceClass
export const init = (klass: typeof PocketBase) => {
const POCKETBASE_URL = Deno.env.get('POCKETBASE_URL')
const ADMIN_LOGIN = Deno.env.get('ADMIN_LOGIN') || ''
const ADMIN_PASSWORD = Deno.env.get('ADMIN_PASSWORD') || ''
if (!POCKETBASE_URL) {
throw new Error(`POCKETBASE_URL must be defined.`)
}
const client = new klass(POCKETBASE_URL)
const adminAuthWithPassword = async (
login = ADMIN_LOGIN,
password = ADMIN_PASSWORD
) => {
    console.log(`Connecting to ${POCKETBASE_URL} as ${login}`)
    await client.admins.authWithPassword(login, password)
    console.log(`Successfully logged in as ${login}.`)
return client
}
return {
adminAuthWithPassword,
client,
}
}

View File

@ -0,0 +1,16 @@
// import from 'https://raw.githubusercontent.com/benallfree/pockethost/deno-workers/packages/deno-worker/index.ts'
import { init } from '../../../deno-worker/index.ts'
// @deno-types="https://cdn.jsdelivr.net/npm/pocketbase/dist/pocketbase.es.d.ts"
import PocketBase from 'https://cdn.jsdelivr.net/npm/pocketbase'
const { client, adminAuthWithPassword } = init(PocketBase)
try {
await adminAuthWithPassword()
setInterval(() => {
console.log(`ping`)
}, 1000)
} catch (e) {
console.error(`caught an error`, e, JSON.stringify(e))
}
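
With the EventSource polyfill installed as a global, the PocketBase SDK's realtime subscriptions can also be used from a Deno worker. The snippet below is a minimal sketch only and is not part of this commit; the `todos` collection is hypothetical and the import paths simply mirror the sample above.

// Hedged sketch (not part of this commit): subscribe to realtime changes of a
// hypothetical `todos` collection from a Deno worker.
import { init } from '../../../deno-worker/index.ts'
// @deno-types="https://cdn.jsdelivr.net/npm/pocketbase/dist/pocketbase.es.d.ts"
import PocketBase from 'https://cdn.jsdelivr.net/npm/pocketbase'
const { client, adminAuthWithPassword } = init(PocketBase)
await adminAuthWithPassword()
await client.collection('todos').subscribe('*', (e) => {
  console.log(`todos changed:`, e.action, e.record)
})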

View File

@ -27,11 +27,16 @@
},
"type": "module",
"dependencies": {
"@microsoft/fetch-event-source": "https://github.com/Almar/fetch-event-source.git#pr/make_web_worker_friendly",
"@pockethost/common": "0.0.1",
"@s-libs/micro-dash": "^14.1.0",
"@types/bootstrap": "^5.2.6",
"@types/d3-scale": "^4.0.3",
"@types/d3-scale-chromatic": "^3.0.0",
"@types/js-cookie": "^3.0.2",
"boolean": "^3.2.0",
"d3-scale": "^4.0.2",
"d3-scale-chromatic": "^3.0.0",
"date-fns": "^2.29.3",
"js-cookie": "^3.0.1",
"pocketbase": "^0.8.0",
@ -41,4 +46,4 @@
"svelte-highlight": "^6.2.1",
"type-fest": "^3.3.0"
}
}
}

View File

@ -1,4 +1,3 @@
import { browser } from '$app/environment'
import { env as _env } from '$env/dynamic/public'
import { boolean } from 'boolean'
import base from '../../../package.json'
@ -16,10 +15,6 @@ export const envb = (name: string, _default: boolean) => boolean(env(name, _defa
export const PUBLIC_APP_DB = env('PUBLIC_APP_DB', 'pockethost-central')
export const PUBLIC_APP_DOMAIN = env('PUBLIC_APP_DOMAIN', 'pockethost.io')
export const PUBLIC_APP_PROTOCOL = env('PUBLIC_APP_PROTOCOL', 'https')
export const PUBLIC_PB_PROTOCOL = env(
'PUBLIC_PB_PROTOCOL',
browser ? window.location.protocol : 'https'
)
export const PUBLIC_DEBUG = envb('PUBLIC_DEBUG', false)
export const PUBLIC_POCKETHOST_VERSION = base.version

View File

@ -1,11 +1,16 @@
import { createGenericSyncEvent } from '$util/events'
import { fetchEventSource } from '@microsoft/fetch-event-source'
import {
assertExists,
BackupInstancePayloadSchema,
CreateInstancePayloadSchema,
createRpcHelper,
createWatchHelper,
logger,
RestoreInstancePayloadSchema,
RpcCommands,
safeCatch,
SaveSecretsPayloadSchema,
type BackupFields,
type BackupInstancePayload,
type BackupInstanceResult,
@ -15,7 +20,10 @@ import {
type InstanceId,
type RestoreInstancePayload,
type RestoreInstanceResult,
type UserFields
type SaveSecretsPayload,
type SaveSecretsResult,
type UserFields,
type WorkerLogFields
} from '@pockethost/common'
import { keys, map } from '@s-libs/micro-dash'
import PocketBase, {
@ -112,14 +120,21 @@ export const createPocketbaseClient = (config: PocketbaseClientConfig) => {
const { mkRpc } = rpcMixin
const createInstance = mkRpc<CreateInstancePayload, CreateInstanceResult>(
RpcCommands.CreateInstance
RpcCommands.CreateInstance,
CreateInstancePayloadSchema
)
const createInstanceBackupJob = mkRpc<BackupInstancePayload, BackupInstanceResult>(
RpcCommands.BackupInstance
RpcCommands.BackupInstance,
BackupInstancePayloadSchema
)
const createInstanceRestoreJob = mkRpc<RestoreInstancePayload, RestoreInstanceResult>(
RpcCommands.RestoreInstance
RpcCommands.RestoreInstance,
RestoreInstancePayloadSchema
)
const saveSecrets = mkRpc<SaveSecretsPayload, SaveSecretsResult>(
RpcCommands.SaveSecrets,
SaveSecretsPayloadSchema
)
const getInstanceById = safeCatch(
@ -228,7 +243,57 @@ export const createPocketbaseClient = (config: PocketbaseClientConfig) => {
})
}
const watchInstanceLog = (
instanceId: InstanceId,
update: (log: WorkerLogFields) => void,
nInitial = 100
): (() => void) => {
const auth = client.authStore.exportToCookie()
const controller = new AbortController()
const signal = controller.signal
const continuallyFetchFromEventSource = () => {
dbg(`Subscribing to ${url}`)
fetchEventSource(`${url}/logs`, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
instanceId,
n: nInitial,
auth
}),
onmessage: (event) => {
// dbg(`Got stream event`, event)
const log = JSON.parse(event.data) as WorkerLogFields
// dbg(`Log is`, log)
update(log)
},
onopen: async (response) => {
dbg(`Stream is open`, response)
},
onerror: (e) => {
dbg(`Stream error`, e)
},
onclose: () => {
setTimeout(continuallyFetchFromEventSource, 100)
dbg(`Stream closed`)
},
signal
})
}
continuallyFetchFromEventSource()
return () => {
controller.abort()
}
}
return {
saveSecrets,
watchInstanceLog,
getAuthStoreProps,
parseError,
getInstanceById,

View File

@ -5,8 +5,10 @@
import Backup from './Backup.svelte'
import Code from './Code.svelte'
import Ftp from './Ftpx.svelte'
import Logging from './Logging.svelte'
import Overview from './Overview.svelte'
import Restore from './Restore.svelte'
import Secrets from './Secrets/Secrets.svelte'
import { instance } from './store'
assertExists($instance, `Expected instance here`)
@ -22,6 +24,8 @@
<Overview instance={$instance} />
<Ftp instance={$instance} />
<Code instance={$instance} />
<Secrets instance={$instance} />
<Logging instance={$instance} />
<Backup instance={$instance} />
<Restore instance={$instance} />
{/if}

View File

@ -31,7 +31,7 @@
<th>backups</th><td>Location of tgz backups made using this UI</td>
</tr>
<tr>
<th>workers</th><td>Deno workers (cloud TS/JS functions)</td>
<th>worker</th><td>Deno worker (cloud TS/JS functions)</td>
</tr>
</table>
</div>

View File

@ -0,0 +1,95 @@
<script lang="ts">
import { client } from '$src/pocketbase'
import { dbg } from '$util/logger'
import type { InstanceFields, InstanceLogFields, RecordId } from '@pockethost/common'
import { createCleanupManager } from '@pockethost/common'
import { values } from '@s-libs/micro-dash'
import { onDestroy, onMount } from 'svelte'
import { writable } from 'svelte/store'
export let instance: InstanceFields
const { id } = instance
const logs = writable<{ [_: RecordId]: InstanceLogFields }>({})
let logsArray: InstanceLogFields[] = []
const cm = createCleanupManager()
onMount(async () => {
dbg(`Watching instance log`)
const unsub = client().watchInstanceLog(id, (newLog) => {
dbg(`Got new log`, newLog)
logs.update((currentLogs) => {
return { ...currentLogs, [newLog.id]: newLog }
})
logsArray = values($logs)
.sort((a, b) => (a.created > b.created ? 1 : -1))
.slice(0, 1000)
.reverse()
})
cm.add(unsub)
})
onDestroy(cm.shutdown)
</script>
<h2>Instance Logging</h2>
<p>
  Instance logs appear here in real time. They include instance events and errors, as well as Deno
  worker events and errors.
</p>
<div class="log-window">
{#each logsArray as log}
<div class="log">
<div class="time">{log.created}</div>
<div class={`stream ${log.stream}`}>{log.stream}</div>
<div class={`message ${log.stream}`}>
      {(() => {
        try {
          // Pretty-print JSON log messages; fall back to the raw message text.
          // (Svelte escapes interpolated strings, so returning HTML markup here
          // would be rendered literally rather than as markup.)
          return JSON.stringify(JSON.parse(log.message), null, 2)
        } catch (e) {
          return log.message
        }
      })()}
</div>
</div>
{/each}
</div>
<style lang="scss">
.log-window {
border: 1px solid gray;
padding: 5px;
height: 500px;
overflow: auto;
display: flex;
flex-direction: column-reverse;
white-space: nowrap;
.log {
position: relative;
font-family: monospace;
.time {
color: gray;
display: inline-block;
}
.stream {
color: gray;
display: inline-block;
&.system {
color: orange;
}
&.info {
color: blue;
}
&.error {
color: red;
}
}
.message {
display: inline-block;
}
}
}
</style>

View File

@ -0,0 +1,126 @@
<script lang="ts">
import { SECRET_KEY_REGEX } from '@pockethost/common'
  // import the secrets items store
import { items } from './stores.js'
// variables bound to the input elements
let name: string = ''
let value: string = ''
let isKeyValid = false
let isValueValid = false
let isFormValid = false
  // on submit, add the item to the store, or update its value if the key already exists
const handleSubmit = () => {
// findIndex returns -1 if a match is not found
const index = $items.findIndex((item) => item.name === name)
if (index === -1) {
items.create({ name, value })
} else {
items.update({ name, value })
}
name = ''
value = ''
}
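  // reactive validation: normalize the key and value, then enable the submit
  // button only when both are valid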
$: {
name = name.toUpperCase()
value = value.trim()
isKeyValid = !!name.match(SECRET_KEY_REGEX)
isValueValid = value.length > 0
isFormValid = isKeyValid && isValueValid
console.log({ isFormValid })
}
</script>
<!-- form for adding a secret: a text input for the key, a masked password
  input for the value, and a submit button
-->
<section>
  <!-- display a form with 2 fields
    - input[type="text"], describing the key name
    - input[type="password"], holding the secret value
  -->
<!-- wrap each input in a label -->
<label>
<span>Key</span>
<input required type="text" bind:value={name} />
</label>
<label>
<span>Value</span>
<input type="password" bind:value placeholder="" />
</label>
<!-- describe the action of the icon button through aria attributes -->
<button
aria-label="Create entry"
aria-describedby="description"
disabled={!isFormValid}
on:click={() => handleSubmit()}
>
<span id="description" hidden>Add the key value pair to the list of items</span>
<svg viewBox="0 0 100 100" width="40" height="40">
<use href="#create" />
</svg>
</button>
{#if !isKeyValid && name.length > 0}
<div class="text-danger">
All key names must be upper case, alphanumeric, and may include underscore (_).
</div>
{/if}
</section>
<style lang="scss">
h2 {
font-size: 13pt;
}
/* display the input in a wrapping row
flip the hue for the color and background
*/
section {
display: flex;
flex-wrap: wrap;
align-items: center;
color: hsl(240, 25%, 20%);
background: hsla(240, 25%, 95%, 1);
padding: 0.75rem 1rem;
border-radius: 5px;
margin-bottom: 20px;
label {
flex-grow: 1;
display: flex;
flex-direction: column;
font-size: 1rem;
line-height: 2;
input {
font-size: 1.1rem;
color: inherit;
font-family: inherit;
background: none;
border: none;
padding: 0.5rem 0.75rem;
border-left: 0.25rem solid currentColor;
}
}
button {
align-self: flex-end;
background: none;
border: none;
color: inherit;
width: 2.75rem;
height: 2.75rem;
padding: 0.25rem;
margin: 0.25rem 0;
&:disabled {
opacity: 0.5;
}
svg {
display: block;
width: 100%;
height: 100%;
}
}
}
</style>

View File

@ -0,0 +1,87 @@
<script type="ts">
  // import the secrets items store
import { items } from './stores'
</script>
{#if $items.length > 0}
  <!-- list the saved secrets inside a main element -->
  <section>
    <!-- display one card per secret in a grid, showing the key name and a masked value -->
<main>
{#each $items as item (item.name)}
<article style="border-color: {item.color}">
<h2>{item.name}</h2>
<div class="value">
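          <!-- show the first two characters of the value and mask the rest -->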
{item.value.slice(0, 2)}{item.value.slice(2).replaceAll(/./g, '*')}
</div>
<!-- following a click on the button update the store with the delete operation -->
<button on:click={() => items.delete(item.name)} aria-label="Delete">
<svg viewBox="0 0 100 100" width="30" height="30">
<use href="#delete" />
</svg>
</button>
</article>
{/each}
</main>
</section>
{/if}
<style lang="scss">
/* display the items as squares in a grid */
main {
display: grid;
justify-content: center;
grid-template-columns: repeat(auto-fill, 150px);
grid-auto-rows: 150px;
grid-gap: 2rem;
}
/* display the text elements in a column */
article {
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
background: hsla(240, 25%, 50%, 0.1);
border: 5px solid currentColor;
border-radius: 25px;
position: relative;
h2 {
font-weight: 400;
font-size: 10pt;
}
.value {
font-size: 10pt;
text-overflow: ellipsis;
white-space: nowrap;
font-weight: 700;
overflow: hidden;
width: 100%;
padding-left: 5px;
padding-right: 5px;
}
button {
position: absolute;
top: 0%;
right: 0%;
transform: translate(50%, -50%);
background: none;
border: none;
border-radius: 50%;
width: 1.5rem;
height: 1.5rem;
color: inherit;
background: currentColor;
/* use the same hue as the background to fake a clip on the border underneath */
box-shadow: 0 0 0 0.5rem hsl(240, 25%, 20%);
svg {
display: block;
width: 100%;
height: 100%;
color: hsl(240, 25%, 20%);
}
}
}
/* absolute position the button in the top right corner */
</style>

View File

@ -0,0 +1,88 @@
<script lang="ts">
import CodeSample from '$components/CodeSample.svelte'
import { client } from '$src/pocketbase'
import type { InstanceFields, SaveSecretsPayload } from '@pockethost/common'
import { createCleanupManager } from '@pockethost/common'
import { forEach, reduce } from '@s-libs/micro-dash'
import { onDestroy, onMount } from 'svelte'
import Form from './Form.svelte'
import List from './List.svelte'
import { items } from './stores'
import SvgIcons from './SvgIcons.svelte'
export let instance: InstanceFields
const cm = createCleanupManager()
onMount(() => {
forEach(instance.secrets || {}, (value, name) => {
items.create({ name, value })
})
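    // persist the full secrets map to the backend whenever the store changes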
const unsub = items.subscribe(async (secrets) => {
await client().saveSecrets({
instanceId: instance.id,
secrets: reduce(
secrets,
(c, v) => {
const { name, value } = v
c[name] = value
return c
},
{} as SaveSecretsPayload['secrets']
)
})
})
cm.add(unsub)
})
onDestroy(cm.shutdown)
</script>
<div class="py-4">
<div class="secrets">
<h2>Secrets</h2>
<p>These secrets are passed into your Deno cloud worker as environment variables.</p>
<CodeSample
code={$items
.map((secret) => `const ${secret.name} = Deno.env.get('${secret.name}')`)
.join('\n')}
/>
<SvgIcons />
<Form />
<List />
</div>
</div>
<style lang="scss">
.secrets {
box-sizing: border-box;
padding: 0;
margin: 0;
}
.secrets {
color: hsl(240, 25%, 95%);
h2 {
position: relative;
padding: 0.25rem;
span {
position: absolute;
top: 0%;
right: 100%;
transform: translateY(-50%);
display: block;
width: 1.25em;
height: 1.25em;
border-radius: 0.75rem;
background: hsla(240, 25%, 50%, 0.3);
}
span,
span svg {
display: block;
width: 100%;
height: 100%;
filter: drop-shadow(0 0 3px hsla(240, 25%, 0%, 0.5));
}
}
}
</style>

View File

@ -0,0 +1,40 @@
<!-- SVG symbols (icons) used by the secrets UI -->
<svg viewBox="0 0 100 100" width="40" height="40" style="display: none;">
<symbol id="add">
<g fill="none" stroke="currentColor" stroke-width="7" stroke-linecap="round">
<path d="M 50 35 v 30 m -15 -15 h 30"></path>
</g>
</symbol>
<symbol id="create">
<g fill="none" stroke="currentColor" stroke-width="7" stroke-linecap="round">
<g transform="translate(76 24)">
<path d="M -20 0 h -37.5 a 15 15 0 0 0 -15 15 v 42.5 a 15 15 0 0 0 15 15 h 42.5 a 15 15 0 0 0 15 -15 v -37.5"></path>
<circle cx="0" cy="0" r="20"></circle>
<path stroke-width="5" d="M 0 -7 v 14 m -7 -7 h 14"></path>
</g>
</g>
</symbol>
<symbol id="list">
<g fill="none" stroke="currentColor" stroke-width="7" stroke-linecap="round">
<path d="M 50 35 h 20"></path>
<path d="M 30 50 h 40"></path>
<path d="M 30 65 h 20"></path>
</g>
</symbol>
<symbol id="delete">
<g transform="translate(50 50)">
<g transform="rotate(45)">
<g fill="none" stroke="currentColor" stroke-width="10" stroke-linecap="round">
<path d="M 0 -20 v 40 m -20 -20 h 40"></path>
</g>
</g>
</g>
</symbol>
<symbol id="highlight">
<g fill="none" stroke="currentColor" stroke-width="7" stroke-linecap="round">
<path d="M 35 65 v -7.5"></path>
<path d="M 50 65 v -15"></path>
<path d="M 65 65 v -30"></path>
</g>
</symbol>
</svg>


View File

@ -0,0 +1,77 @@
import { scaleOrdinal } from 'd3-scale'
import { schemeTableau10 } from 'd3-scale-chromatic'
import { writable } from 'svelte/store'
// color scale used to give each secret card a distinct color
const colorScale = scaleOrdinal(schemeTableau10)
// the store describes a list of items, each with a name and a value
export type SecretItem = {
name: string
value: string
color?: string
}
export type SecretsArray = SecretItem[]
// sort the input array by name and assign each item a color based on its sorted position
function formatInput(input: SecretsArray): SecretsArray {
return input
.sort((a, b) => (a.name < b.name ? -1 : 1))
.map(({ name, value }, index) => ({
name,
value,
color: colorScale(index.toString())
}))
}
const sanitize = (item: SecretItem) => {
return {
name: item.name.toUpperCase().trim(),
value: item.value.trim()
}
}
// create a custom store fulfilling the CRUD operations
function createItems(initialItems: SecretsArray) {
const { subscribe, set, update } = writable(initialItems)
return {
subscribe,
// create: add an object for the item at the end of the store's array
create: (item: SecretItem) => {
const { name, value } = sanitize(item)
return update((n) => {
n = [
...n,
{
name,
value
}
]
return formatInput(n)
})
},
    // update: replace the value of the selected item
    update: (item: SecretItem) => {
      const { name, value } = sanitize(item)
      return update((n) => {
        const index = n.findIndex((item) => item.name === name)
        n[index].value = value
return formatInput(n)
})
},
// delete: remove the item from the array
delete: (name: string) => {
return update((n) => {
const index = n.findIndex((item) => item.name === name)
n = [...n.slice(0, index), ...n.slice(index + 1)]
return formatInput(n)
})
}
}
}
export const items = createItems(formatInput([]))
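
As a usage sketch only (the Form and List components above are the real consumers of this store), the CRUD surface looks like the following; the key name and values are made up for illustration.

// Hedged usage sketch; the key name and values are illustrative only.
import { items } from './stores'
const unsubscribe = items.subscribe((secrets) => console.log(secrets))
items.create({ name: 'api_key', value: ' abc123 ' }) // sanitized to API_KEY / 'abc123'
items.update({ name: 'API_KEY', value: 'xyz789' }) // update the value of an existing key
items.delete('API_KEY')
unsubscribe()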

View File

@ -26,8 +26,8 @@ const server = createServer(options, async (req, res) => {
res.end()
return
}
res.setHeader('Access-Control-Allow-Origin', '*')
console.log(req.headers.host)
const { host } = req.headers
try {

View File

@ -0,0 +1,30 @@
diff --git a/node_modules/@microsoft/fetch-event-source/package.json b/node_modules/@microsoft/fetch-event-source/package.json
index 8528735..2e9bac3 100644
--- a/node_modules/@microsoft/fetch-event-source/package.json
+++ b/node_modules/@microsoft/fetch-event-source/package.json
@@ -9,9 +9,9 @@
},
"author": "Microsoft",
"license": "MIT",
- "main": "lib/cjs/index.js",
- "module": "lib/esm/index.js",
- "types": "lib/cjs/index.d.ts",
+ "main": "src/index.ts",
+ "module": "src/index.ts",
+ "types": "src/index.ts",
"sideEffects": false,
"scripts": {
"clean": "rimraf ./lib ./coverage",
diff --git a/node_modules/@microsoft/fetch-event-source/src/fetch.ts b/node_modules/@microsoft/fetch-event-source/src/fetch.ts
index 1e2236c..ccfdf6d 100644
--- a/node_modules/@microsoft/fetch-event-source/src/fetch.ts
+++ b/node_modules/@microsoft/fetch-event-source/src/fetch.ts
@@ -84,7 +84,7 @@ export function fetchEventSource(input: RequestInfo, {
}
let retryInterval = DefaultRetryInterval;
- let retryTimer = 0;
+ let retryTimer : ReturnType<typeof globalThis['setTimeout']> | undefined = undefined;
function dispose() {
globalThis.document?.removeEventListener('visibilitychange', onVisibilityChange);
globalThis.clearTimeout(retryTimer);

View File

@ -159,6 +159,10 @@
semver "^7.3.5"
tar "^6.1.11"
"@microsoft/fetch-event-source@https://github.com/Almar/fetch-event-source.git#pr/make_web_worker_friendly":
version "2.0.3"
resolved "https://github.com/Almar/fetch-event-source.git#4ded4d4f5b215b938d7223398d6f2be6473a4c27"
"@mischnic/json-sourcemap@^0.1.0":
version "0.1.0"
resolved "https://registry.yarnpkg.com/@mischnic/json-sourcemap/-/json-sourcemap-0.1.0.tgz#38af657be4108140a548638267d02a2ea3336507"
@ -921,6 +925,23 @@
resolved "https://registry.yarnpkg.com/@types/cookie/-/cookie-0.5.1.tgz#b29aa1f91a59f35e29ff8f7cb24faf1a3a750554"
integrity sha512-COUnqfB2+ckwXXSFInsFdOAWQzCCx+a5hq2ruyj+Vjund94RJQd4LG2u9hnvJrTgunKAaax7ancBYlDrNYxA0g==
"@types/d3-scale-chromatic@^3.0.0":
version "3.0.0"
resolved "https://registry.yarnpkg.com/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.0.0.tgz#103124777e8cdec85b20b51fd3397c682ee1e954"
integrity sha512-dsoJGEIShosKVRBZB0Vo3C8nqSDqVGujJU6tPznsBJxNJNwMF8utmS83nvCBKQYPpjCzaaHcrf66iTRpZosLPw==
"@types/d3-scale@^4.0.3":
version "4.0.3"
resolved "https://registry.yarnpkg.com/@types/d3-scale/-/d3-scale-4.0.3.tgz#7a5780e934e52b6f63ad9c24b105e33dd58102b5"
integrity sha512-PATBiMCpvHJSMtZAMEhc2WyL+hnzarKzI6wAHYjhsonjWJYGq5BXTzQjv4l8m2jO183/4wZ90rKvSeT7o72xNQ==
dependencies:
"@types/d3-time" "*"
"@types/d3-time@*":
version "3.0.0"
resolved "https://registry.yarnpkg.com/@types/d3-time/-/d3-time-3.0.0.tgz#e1ac0f3e9e195135361fa1a1d62f795d87e6e819"
integrity sha512-sZLCdHvBUcNby1cB6Fd3ZBrABbjz3v1Vm90nysCQ6Vt7vd6e/h9Lt7SiJUoEX0l4Dzc7P5llKyhqSi1ycSf1Hg==
"@types/estree@*", "@types/estree@^1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.0.tgz#5fb2e536c1ae9bf35366eed879e827fa59ca41c2"
@ -1519,6 +1540,63 @@ csso@^4.2.0:
dependencies:
css-tree "^1.1.2"
"d3-array@2 - 3", "d3-array@2.10.0 - 3":
version "3.2.1"
resolved "https://registry.yarnpkg.com/d3-array/-/d3-array-3.2.1.tgz#39331ea706f5709417d31bbb6ec152e0328b39b3"
integrity sha512-gUY/qeHq/yNqqoCKNq4vtpFLdoCdvyNpWoC/KNjhGbhDuQpAM9sIQQKkXSNpXa9h5KySs/gzm7R88WkUutgwWQ==
dependencies:
internmap "1 - 2"
"d3-color@1 - 3":
version "3.1.0"
resolved "https://registry.yarnpkg.com/d3-color/-/d3-color-3.1.0.tgz#395b2833dfac71507f12ac2f7af23bf819de24e2"
integrity sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==
"d3-format@1 - 3":
version "3.1.0"
resolved "https://registry.yarnpkg.com/d3-format/-/d3-format-3.1.0.tgz#9260e23a28ea5cb109e93b21a06e24e2ebd55641"
integrity sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==
"d3-interpolate@1 - 3", "d3-interpolate@1.2.0 - 3":
version "3.0.1"
resolved "https://registry.yarnpkg.com/d3-interpolate/-/d3-interpolate-3.0.1.tgz#3c47aa5b32c5b3dfb56ef3fd4342078a632b400d"
integrity sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==
dependencies:
d3-color "1 - 3"
d3-scale-chromatic@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/d3-scale-chromatic/-/d3-scale-chromatic-3.0.0.tgz#15b4ceb8ca2bb0dcb6d1a641ee03d59c3b62376a"
integrity sha512-Lx9thtxAKrO2Pq6OO2Ua474opeziKr279P/TKZsMAhYyNDD3EnCffdbgeSYN5O7m2ByQsxtuP2CSDczNUIZ22g==
dependencies:
d3-color "1 - 3"
d3-interpolate "1 - 3"
d3-scale@^4.0.2:
version "4.0.2"
resolved "https://registry.yarnpkg.com/d3-scale/-/d3-scale-4.0.2.tgz#82b38e8e8ff7080764f8dcec77bd4be393689396"
integrity sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==
dependencies:
d3-array "2.10.0 - 3"
d3-format "1 - 3"
d3-interpolate "1.2.0 - 3"
d3-time "2.1.1 - 3"
d3-time-format "2 - 4"
"d3-time-format@2 - 4":
version "4.1.0"
resolved "https://registry.yarnpkg.com/d3-time-format/-/d3-time-format-4.1.0.tgz#7ab5257a5041d11ecb4fe70a5c7d16a195bb408a"
integrity sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==
dependencies:
d3-time "1 - 3"
"d3-time@1 - 3", "d3-time@2.1.1 - 3":
version "3.1.0"
resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-3.1.0.tgz#9310db56e992e3c0175e1ef385e545e48a9bb5c7"
integrity sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==
dependencies:
d3-array "2 - 3"
data-uri-to-buffer@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/data-uri-to-buffer/-/data-uri-to-buffer-4.0.0.tgz#b5db46aea50f6176428ac05b73be39a57701a64b"
@ -2235,6 +2313,11 @@ inherits@2, inherits@^2.0.3, inherits@~2.0.0, inherits@~2.0.3:
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
"internmap@1 - 2":
version "2.0.3"
resolved "https://registry.yarnpkg.com/internmap/-/internmap-2.0.3.tgz#6685f23755e43c524e251d29cbc97248e3061009"
integrity sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==
interpret@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/interpret/-/interpret-2.2.0.tgz#1a78a0b5965c40a5416d007ad6f50ad27c417df9"
@ -3528,6 +3611,11 @@ sqlite3@^5.1.2:
optionalDependencies:
node-gyp "8.x"
sqlite@^4.1.2:
version "4.1.2"
resolved "https://registry.yarnpkg.com/sqlite/-/sqlite-4.1.2.tgz#35643893dc1cec118c835cbab9ddbd283ffaba7d"
integrity sha512-FlBG51gHbux5vPjwnoqFEghNGvnTMTbHyiI09U3qFTQs9AtWuwd4i++6+WCusCXKrVdIDLzfdGekrolr3m4U4A==
ssri@^8.0.0, ssri@^8.0.1:
version "8.0.1"
resolved "https://registry.yarnpkg.com/ssri/-/ssri-8.0.1.tgz#638e4e439e2ffbd2cd289776d5ca457c4f51a2af"
@ -3859,6 +3947,11 @@ uri-js@^4.2.2:
dependencies:
punycode "^2.1.0"
url-pattern@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/url-pattern/-/url-pattern-1.0.3.tgz#0409292471b24f23c50d65a47931793d2b5acfc1"
integrity sha512-uQcEj/2puA4aq1R3A2+VNVBgaWYR24FdWjl7VNW83rnWftlhyzOZ/tBjezRiC2UkIzuxC8Top3IekN3vUf1WxA==
util-deprecate@^1.0.1, util-deprecate@~1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"