mirror of https://github.com/pockethost/pockethost.git
synced 2025-06-07 06:36:43 +00:00

53 db snapshots (#85)

This commit is contained in:
parent 14bebf2d0e
commit a8ed9b9c93
@@ -1,4 +1,4 @@
 FROM node:18-alpine as pockethost-buildbox
 COPY --from=golang:1.19.3-alpine /usr/local/go/ /usr/local/go/
 ENV PATH="/usr/local/go/bin:${PATH}"
-RUN apk add python3 py3-pip make gcc musl-dev g++ bash
+RUN apk add python3 py3-pip make gcc musl-dev g++ bash sqlite
@@ -9,7 +9,7 @@ services:
     dockerfile: Dockerfile
     container_name: prepbox
     working_dir: /src
-    command: bash -c "yarn"
+    command: bash -c "yarn && npx patch-package"
     volumes:
       - ./mount/cache/go:/go
       - ./mount/cache/yarn:/usr/local/share/.cache/yarn/v6
@@ -1,38 +0,0 @@
import { client } from '../client'
import {
  Pb_Any_Record_Db,
  Pb_CollectionName,
  Pb_PkId,
  Pb_Untrusted_Db,
} from '../schema/base'

export const createRealtimeSubscriptionManager = () => {
  const subscriptions: { [_: string]: number } = {}

  const subscribe = <TRec extends Pb_Any_Record_Db>(
    collectionName: Pb_CollectionName,
    cb: (rec: Pb_Untrusted_Db<TRec>) => void,
    id?: Pb_PkId
  ) => {
    const slug = id ? `${collectionName}/${id}` : collectionName

    if (subscriptions[slug]) {
      subscriptions[slug]++
    } else {
      subscriptions[slug] = 1
      client.realtime.subscribe(slug, (e) => {
        console.log(`Realtime update`, { e })
        cb(e.record as unknown as Pb_Untrusted_Db<TRec>)
      })
    }
    return () => {
      subscriptions[slug]--
      if (subscriptions[slug] === 0) {
        console.log(`Realtime unsub`)
        client.realtime.unsubscribe(slug)
      }
    }
  }

  return subscribe
}
@@ -1,6 +1,5 @@
 export * from './getOne'
 export * from './onAuthStateChanged'
 export * from './pbUid'
-export * from './RealtimeSubscriptionManager'
 export * from './signInAnonymously'
 export * from './upsert'
@@ -1,38 +0,0 @@
import PocketBase, { Record } from 'pocketbase'
import { CollectionName, RecordId } from './schema'

export interface RecordSubscription<T = Record> {
  action: string
  record: T
}

export type RealtimeEventHandler<TRec> = (e: RecordSubscription<TRec>) => void

export const createRealtimeSubscriptionManager = (pocketbase: PocketBase) => {
  const subscriptions: { [_: string]: number } = {}

  const subscribeOne = <TRec>(
    collection: CollectionName,
    id: RecordId,
    cb: (e: RecordSubscription<TRec>) => void
  ) => {
    const slug = `${collection}/${id}`
    if (subscriptions[slug]) {
      subscriptions[slug]++
    } else {
      subscriptions[slug] = 1
      pocketbase.collection(collection).subscribeOne<TRec>(id, (e) => {
        console.log(`Realtime update`, { e })
        cb(e)
      })
    }
    return () => {
      subscriptions[slug]--
      if (subscriptions[slug] === 0) {
        pocketbase.collection(collection).unsubscribe(id)
      }
    }
  }

  return { subscribeOne }
}
packages/common/src/TimerManager.ts (new file, 49 lines)
@@ -0,0 +1,49 @@
import { forEach } from '@s-libs/micro-dash'

export type UnixTimestampMs = number
export type TimerCanceler = () => void

export type Config = {}

export const createTimerManager = (config: Config) => {
  let i = 0
  const cleanups: any = {}

  const add = (cb: () => void, ms: UnixTimestampMs) => {
    const idx = i++
    const tid = setTimeout(() => {
      cancel()
      cb()
    }, ms)
    const cancel = () => {
      clearTimeout(tid)
      delete cleanups[idx]
    }
    cleanups[idx] = cancel
    return cancel
  }

  const shutdown = () => {
    // console.log(`Canceling all`, cleanups)
    forEach(cleanups, (c) => c())
    // console.log(`done`, cleanups)
  }

  const repeat = (
    cb: () => Promise<boolean> | boolean,
    ms: UnixTimestampMs
  ) => {
    let _unsub: TimerCanceler | undefined = undefined
    const _again = async () => {
      const shouldRepeat = await cb()
      if (shouldRepeat) _unsub = add(_again, ms)
    }
    _again()
    return () => {
      _unsub?.()
      _unsub = undefined
    }
  }

  return { add, shutdown, repeat }
}
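As a usage sketch of the new timer manager (the call site below is hypothetical and not part of this commit): repeat keeps rescheduling itself as long as the callback returns true, and shutdown cancels every outstanding timer at once.

import { createTimerManager } from '@pockethost/common'

const tm = createTimerManager({})

// Poll every 5 seconds until the (hypothetical) checkQueue helper reports no more work
const stop = tm.repeat(async () => {
  const hasMoreWork = await checkQueue() // assumed helper, not from this commit
  return hasMoreWork // returning false ends this repeat loop
}, 5000)

// Later: cancel just this loop, or tear everything down
stop()
tm.shutdown()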
@@ -1,4 +1,4 @@
 export * from './assert'
-export * from './RealtimeSubscriptionManager'
 export * from './releases'
 export * from './schema'
+export * from './TimerManager'
@@ -1,54 +0,0 @@
import { PlatformId, VersionId } from './releases'

export type RecordId = string
export type UserId = RecordId
export type InstanceId = RecordId
export type InternalInstanceId = RecordId
export type Subdomain = string
export type Port = number
export type IsoDate = string
export type ProcessId = number
export type Username = string
export type Password = string
export type CollectionName = string
export type Seconds = number

export const pocketNow = () => new Date().toISOString()

export enum InstanceStatus {
  Unknown = '',
  Idle = 'idle',
  Port = 'porting',
  Starting = 'starting',
  Running = 'running',
  Failed = 'failed',
}

export type InstancesRecord = {
  id: RecordId
  subdomain: Subdomain
  uid: UserId
  status: InstanceStatus
  platform: PlatformId
  version: VersionId
  secondsThisMonth: Seconds
}

export type InstancesRecord_New = Omit<InstancesRecord, 'id'>

export type UserRecord = {
  id: RecordId
  email: string
  verified: boolean
}

export type InvocationRecord = {
  id: RecordId
  instanceId: RecordId
  startedAt: IsoDate
  endedAt: IsoDate
  pid: number
  totalSeconds: number
}

export type InstanceRecordById = { [_: InstanceId]: InstancesRecord }
packages/common/src/schema/Backup.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
import { InstanceId, IsoDate, RecordId } from './types'

export enum BackupStatus {
  Queued = 'queued',
  Running = 'running',
  FinishedSuccess = 'finished-success',
  FinishedError = 'finished-error',
}

export type BackupRecordId = RecordId
export type BackupRecord = {
  id: BackupRecordId
  instanceId: InstanceId
  status: BackupStatus
  message: string
  bytes: number
  created: IsoDate
  updated: IsoDate
  platform: string
  version: string
  progress: {
    [_: string]: number
  }
}

export type BackupRecord_Create = Pick<
  BackupRecord,
  'instanceId' | 'status' | 'platform' | 'version'
>

export type BackupRecord_Update = Partial<
  Pick<
    BackupRecord,
    | 'instanceId'
    | 'status'
    | 'bytes'
    | 'message'
    | 'platform'
    | 'version'
    | 'progress'
  >
>
packages/common/src/schema/Instance.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
import { PlatformId, VersionId } from '../releases'
import { RecordId, Seconds, Subdomain, UserId } from './types'

export enum InstanceStatus {
  Unknown = '',
  Idle = 'idle',
  Port = 'porting',
  Starting = 'starting',
  Running = 'running',
  Failed = 'failed',
}

export type InstancesRecord = {
  id: RecordId
  subdomain: Subdomain
  uid: UserId
  status: InstanceStatus
  platform: PlatformId
  version: VersionId
  secondsThisMonth: Seconds
}

export type InstancesRecord_New = Omit<InstancesRecord, 'id'>
packages/common/src/schema/Invocation.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
import { IsoDate, RecordId } from './types'

export type InvocationRecord = {
  id: RecordId
  instanceId: RecordId
  startedAt: IsoDate
  endedAt: IsoDate
  pid: number
  totalSeconds: number
}
packages/common/src/schema/Job.ts (new file, 46 lines)
@@ -0,0 +1,46 @@
import { BackupRecordId } from './Backup'
import { InstancesRecord } from './Instance'
import { InstanceId, RecordId, UserId } from './types'

export enum JobStatus {
  New = 'new',
  Queued = 'queued',
  Running = 'running',
  FinishedSuccess = 'finished-success',
  FinishedError = 'finished-error',
}

export type JobPayloadBase = {
  cmd: JobCommands
}

export enum JobCommands {
  BackupInstance = 'backup-instance',
  RestoreInstance = 'restore-instance',
}

export const JOB_COMMANDS = [JobCommands.BackupInstance]

export type InstanceBackupJobPayload = {
  cmd: JobCommands.BackupInstance
  instanceId: InstanceId
}

export type InstanceRestoreJobPayload = {
  cmd: JobCommands.RestoreInstance
  backupId: BackupRecordId
}

export type JobRecord<TPayload> = {
  id: RecordId
  userId: UserId
  payload: TPayload
  status: JobStatus
  message: string
}

export type InstanceBackupJobRecord = JobRecord<InstanceBackupJobPayload>

export type JobRecord_In<TPayload> = Omit<JobRecord<TPayload>, 'id' | 'message'>

export type InstanceRecordById = { [_: InstanceId]: InstancesRecord }
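As an illustration of how these job types fit together (a sketch only; the surrounding client, userId, and instanceId variables are assumed, and the record-creation call mirrors the PocketBase client API used elsewhere in this commit): a frontend would enqueue a backup by writing a JobRecord_In whose payload is an InstanceBackupJobPayload.

import {
  InstanceBackupJobPayload,
  JobCommands,
  JobRecord_In,
  JobStatus,
} from '@pockethost/common'

// Assumes `client` is an authenticated PocketBase client and userId/instanceId are in scope
const payload: InstanceBackupJobPayload = {
  cmd: JobCommands.BackupInstance,
  instanceId,
}
const job: JobRecord_In<InstanceBackupJobPayload> = {
  userId,
  payload,
  status: JobStatus.New, // matches the 'jobs' createRule added later in this commit
}
await client.collection('jobs').create(job)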
packages/common/src/schema/User.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
import { RecordId } from './types'

export type UserRecord = {
  id: RecordId
  email: string
  verified: boolean
}
packages/common/src/schema/index.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
export * from './Backup'
export * from './Instance'
export * from './Invocation'
export * from './Job'
export * from './types'
export * from './User'
export * from './util'
packages/common/src/schema/types.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
export type RecordId = string
export type UserId = RecordId
export type InstanceId = RecordId
export type InternalInstanceId = RecordId
export type Subdomain = string
export type Port = number
export type IsoDate = string
export type ProcessId = number
export type Username = string
export type Password = string
export type CollectionName = string
export type Seconds = number
packages/common/src/schema/util.ts (new file, 1 line)
@@ -0,0 +1 @@
export const pocketNow = () => new Date().toISOString()
@@ -9,11 +9,12 @@
     "sourceMap": true,
     "strict": true,
     "module": "ESNext",
+    "target": "ESNext",
     "moduleResolution": "node",
     "noUncheckedIndexedAccess": true,
     "strictNullChecks": true,
     "noEmit": true,
-    "types": ["vite/client"]
+    "types": []
   },
 
   "include": ["./src"]
@@ -23,7 +23,10 @@
     "node-fetch": "^3.2.10",
     "pocketbase": "^0.8.0-rc1",
     "sqlite3": "^5.1.2",
-    "type-fest": "^3.1.0"
+    "type-fest": "^3.1.0",
+    "eventsource": "^2.0.2",
+    "tmp": "^0.2.1",
+    "@types/tmp": "^0.2.1"
   },
   "devDependencies": {
     "tsx": "^3.11.0"
packages/daemon/src/db/BackupMixin.ts (new file, 82 lines)
@@ -0,0 +1,82 @@
import {
  BackupRecord,
  BackupRecordId,
  BackupRecord_Create,
  BackupRecord_Update,
  BackupStatus,
  InstanceId,
  InstancesRecord,
} from '@pockethost/common'
import { safeCatch } from '../util/safeAsync'
import { MixinContext } from './PbClient'

export type BackupApi = ReturnType<typeof createBackupMixin>

export const createBackupMixin = (context: MixinContext) => {
  const { client, rawDb } = context

  const createBackup = safeCatch(
    `createBackup`,
    async (instanceId: InstanceId) => {
      const instance = await client
        .collection('instances')
        .getOne<InstancesRecord>(instanceId)
      if (!instance) {
        throw new Error(`Expected ${instanceId} to be a valid instance`)
      }
      const { platform, version } = instance
      const rec: BackupRecord_Create = {
        instanceId,
        status: BackupStatus.Queued,
        platform,
        version,
      }
      const created = await client
        .collection('backups')
        .create<BackupRecord>(rec)
      return created
    }
  )

  const updateBackup = safeCatch(
    `updateBackup`,
    async (backupId: BackupRecordId, fields: BackupRecord_Update) => {
      await client.collection('backups').update(backupId, fields)
    }
  )

  const resetBackups = safeCatch(`resetBackups`, async () =>
    rawDb('backups')
      .whereNotIn('status', [
        BackupStatus.FinishedError,
        BackupStatus.FinishedSuccess,
      ])
      .delete()
  )

  const getNextBackupJob = safeCatch(`getNextBackupJob`, async () => {
    return client
      .collection('backups')
      .getList<BackupRecord>(1, 1, {
        filter: `status = '${BackupStatus.Queued}'`,
      })
      .then((recs) => {
        return recs.items[0] || null
      })
  })

  const getBackupJob = safeCatch(
    `getBackupJob`,
    async (backupId: BackupRecordId) => {
      return client.collection('backups').getOne<BackupRecord>(backupId)
    }
  )

  return {
    createBackup,
    updateBackup,
    resetBackups,
    getNextBackupJob,
    getBackupJob,
  }
}
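A minimal sketch of how a mixin like this is consumed (assumed wiring; the actual composition appears in the PbClient.ts changes further down): the mixin receives a MixinContext holding the authenticated PocketBase client and a Knex handle, and its returned functions are spread onto the combined client API.

// Hypothetical call site, mirroring the pattern used in PbClient.ts below;
// `client` (PocketBase) and `rawDb` (Knex) are assumed to be in scope.
import { createBackupMixin } from './BackupMixin'
import { MixinContext } from './PbClient'

const context: MixinContext = { client, rawDb }
const backupApi = createBackupMixin(context)

// Queue a backup for an instance, then fetch the next queued backup job
const backup = await backupApi.createBackup(instanceId)
const next = await backupApi.getNextBackupJob()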
packages/daemon/src/db/InstanceMIxin.ts (new file, 110 lines)
@@ -0,0 +1,110 @@
import {
  assertExists,
  InstanceId,
  InstancesRecord,
  InstanceStatus,
  UserRecord,
} from '@pockethost/common'
import { reduce } from '@s-libs/micro-dash'
import Bottleneck from 'bottleneck'
import { endOfMonth, startOfMonth } from 'date-fns'
import { dbg } from '../util/dbg'
import { safeCatch } from '../util/safeAsync'
import { MixinContext } from './PbClient'

export type InstanceApi = ReturnType<typeof createInstanceMixin>

export const createInstanceMixin = (context: MixinContext) => {
  const { client, rawDb } = context

  const getInstanceBySubdomain = safeCatch(
    `getInstanceBySubdomain`,
    (subdomain: string): Promise<[InstancesRecord, UserRecord] | []> =>
      client
        .collection('instances')
        .getFirstListItem<InstancesRecord>(`subdomain = '${subdomain}'`)
        .then((instance) => {
          if (!instance) return []
          return client
            .collection('users')
            .getOne<UserRecord>(instance.uid)
            .then((user) => {
              return [instance, user]
            })
        })
  )

  const updateInstance = safeCatch(
    `updateInstance`,
    async (instanceId: InstanceId, fields: Partial<InstancesRecord>) => {
      await client.collection('instances').update(instanceId, fields)
    }
  )

  const updateInstanceStatus = safeCatch(
    `updateInstanceStatus`,
    async (instanceId: InstanceId, status: InstanceStatus) => {
      await updateInstance(instanceId, { status })
    }
  )

  const getInstance = safeCatch(
    `getInstance`,
    async (instanceId: InstanceId) => {
      return client.collection('instances').getOne<InstancesRecord>(instanceId)
    }
  )

  const updateInstances = safeCatch(
    'updateInstances',
    async (cb: (rec: InstancesRecord) => Partial<InstancesRecord>) => {
      const res = await client
        .collection('instances')
        .getFullList<InstancesRecord>(200)
      const limiter = new Bottleneck({ maxConcurrent: 1 })
      const promises = reduce(
        res,
        (c, r) => {
          c.push(
            limiter.schedule(() => {
              const toUpdate = cb(r)
              dbg(`Updating instance ${r.id} with ${JSON.stringify(toUpdate)}`)
              return client.collection('instances').update(r.id, toUpdate)
            })
          )
          return c
        },
        [] as Promise<void>[]
      )
      await Promise.all(promises)
    }
  )

  const updateInstanceSeconds = safeCatch(
    `updateInstanceSeconds`,
    async (instanceId: InstanceId, forPeriod = new Date()) => {
      const startIso = startOfMonth(forPeriod).toISOString()
      const endIso = endOfMonth(forPeriod).toISOString()
      const query = rawDb('invocations')
        .sum('totalSeconds as t')
        .where('instanceId', instanceId)
        .where('startedAt', '>=', startIso)
        .where('startedAt', '<=', endIso)
      dbg(query.toString())
      const res = await query
      const [row] = res
      assertExists(row, `Expected row here`)
      const secondsThisMonth = row.t
      await updateInstance(instanceId, { secondsThisMonth })
    }
  )

  return {
    updateInstance,
    updateInstanceStatus,
    getInstanceBySubdomain,
    getInstance,
    updateInstanceSeconds,
    updateInstances,
  }
}
packages/daemon/src/db/InvocationMixin.ts (new file, 69 lines)
@@ -0,0 +1,69 @@
import {
  InstancesRecord,
  InvocationRecord,
  pocketNow,
} from '@pockethost/common'
import { dbg } from '../util/dbg'
import { safeCatch } from '../util/safeAsync'
import { InstanceApi } from './InstanceMIxin'
import { MixinContext } from './PbClient'

export const createInvocationMixin = (
  context: MixinContext,
  instanceApi: InstanceApi
) => {
  const { client } = context

  const createInvocation = safeCatch(
    `createInvocation`,
    async (instance: InstancesRecord, pid: number) => {
      const init: Partial<InvocationRecord> = {
        startedAt: pocketNow(),
        pid,
        instanceId: instance.id,
        totalSeconds: 0,
      }
      const _inv = await client
        .collection('invocations')
        .create<InvocationRecord>(init)
      return _inv
    }
  )

  const pingInvocation = safeCatch(
    `pingInvocation`,
    async (invocation: InvocationRecord) => {
      const totalSeconds =
        (+new Date() - Date.parse(invocation.startedAt)) / 1000
      const toUpdate: Partial<InvocationRecord> = {
        totalSeconds,
      }
      const _inv = await client
        .collection('invocations')
        .update<InvocationRecord>(invocation.id, toUpdate)
      await instanceApi.updateInstanceSeconds(invocation.instanceId)
      return _inv
    }
  )

  const finalizeInvocation = safeCatch(
    `finalizeInvocation`,
    async (invocation: InvocationRecord) => {
      dbg('finalizing')
      const totalSeconds =
        (+new Date() - Date.parse(invocation.startedAt)) / 1000
      const toUpdate: Partial<InvocationRecord> = {
        endedAt: pocketNow(),
        totalSeconds,
      }
      dbg({ toUpdate })
      const _inv = await client
        .collection('invocations')
        .update<InvocationRecord>(invocation.id, toUpdate)
      await instanceApi.updateInstanceSeconds(invocation.instanceId)
      return _inv
    }
  )

  return { finalizeInvocation, pingInvocation, createInvocation }
}
packages/daemon/src/db/JobMixin.ts (new file, 60 lines)
@@ -0,0 +1,60 @@
import { JobRecord, JobStatus } from '@pockethost/common'
import { safeCatch } from '../util/safeAsync'
import { MixinContext } from './PbClient'

export enum RecordSubscriptionActions {
  Create = 'create',
  Update = 'update',
  Delete = 'delete',
}

export const createJobMixin = (context: MixinContext) => {
  const { client, rawDb } = context
  const onNewJob = safeCatch(
    `onNewJob`,
    async (cb: (e: JobRecord<any>) => void) => {
      const unsub = await client
        .collection('jobs')
        .subscribe<JobRecord<any>>('*', (e) => {
          if (e.action !== RecordSubscriptionActions.Create) return
          cb(e.record)
        })
      return unsub
    }
  )

  const resetJobs = safeCatch(`resetJobs`, async () =>
    rawDb('jobs')
      .whereNotIn('status', [
        JobStatus.FinishedError,
        JobStatus.FinishedSuccess,
      ])
      .update({
        status: JobStatus.New,
      })
  )

  const incompleteJobs = safeCatch(`incompleteJobs`, async () => {
    return client.collection('jobs').getFullList<JobRecord<any>>(100, {
      filter: `status != '${JobStatus.FinishedError}' && status != '${JobStatus.FinishedSuccess}'`,
    })
  })

  const rejectJob = safeCatch(
    `rejectJob`,
    async (job: JobRecord<any>, message: string) => {
      return client
        .collection('jobs')
        .update(job.id, { status: JobStatus.FinishedError, message })
    }
  )

  const setJobStatus = safeCatch(
    `setJobStatus`,
    async (job: JobRecord<any>, status: JobStatus) => {
      return client.collection('jobs').update(job.id, { status })
    }
  )

  return { incompleteJobs, resetJobs, onNewJob, rejectJob, setJobStatus }
}
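A usage sketch of the job mixin's realtime hook (assumed call site, not from this commit): onNewJob resolves to an unsubscribe function, and only 'create' events reach the callback.

// Assumes `jobsApi` was built via createJobMixin(context), as in PbClient.ts below
const unsub = await jobsApi.onNewJob((job) => {
  console.log(`New job ${job.id}`, job.payload)
})

// Later, stop listening for new jobs
await unsub()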
@@ -1,22 +1,22 @@
+import { Knex } from 'knex'
 import {
-  assertExists,
-  InstanceId,
-  InstancesRecord,
-  InstanceStatus,
-  InvocationRecord,
-  pocketNow,
-  UserRecord,
-} from '@pockethost/common'
-import { reduce } from '@s-libs/micro-dash'
-import Bottleneck from 'bottleneck'
-import { endOfMonth, startOfMonth } from 'date-fns'
-import PocketBase, { Collection } from 'pocketbase'
+  Collection,
+  default as PocketBase,
+  default as pocketbaseEs,
+} from 'pocketbase'
 import { DAEMON_PB_DATA_DIR, PUBLIC_PB_SUBDOMAIN } from '../constants'
 import { Collection_Serialized } from '../migrate/schema'
-import { dbg } from '../util/dbg'
 import { safeCatch } from '../util/safeAsync'
+import { createBackupMixin } from './BackupMixin'
+import { createInstanceMixin } from './InstanceMIxin'
+import { createInvocationMixin } from './InvocationMixin'
+import { createJobMixin } from './JobMixin'
 import { createRawPbClient } from './RawPbClient'
 
+export type PocketbaseClientApi = ReturnType<typeof createPbClient>
+
+export type MixinContext = { client: pocketbaseEs; rawDb: Knex }
+
 export const createPbClient = (url: string) => {
   console.log(`Initializing client: ${url}`)
   const rawDb = createRawPbClient(
@@ -25,7 +25,7 @@ export const createPbClient = (url: string) => {
 
   const client = new PocketBase(url)
   client.beforeSend = (url: string, reqConfig: { [_: string]: any }) => {
-    dbg(reqConfig)
+    // dbg(reqConfig)
     delete reqConfig.signal
     return reqConfig
   }
@@ -36,107 +36,6 @@ export const createPbClient = (url: string) => {
     client.admins.authWithPassword(email, password)
   )
 
-  const getInstanceBySubdomain = safeCatch(
-    `getInstanceBySubdomain`,
-    (subdomain: string): Promise<[InstancesRecord, UserRecord] | []> =>
-      client
-        .collection('instances')
-        .getFirstListItem<InstancesRecord>(`subdomain = '${subdomain}'`)
-        .then((instance) => {
-          if (!instance) return []
-          return client
-            .collection('users')
-            .getOne<UserRecord>(instance.uid)
-            .then((user) => {
-              return [instance, user]
-            })
-        })
-  )
-
-  const updateInstance = safeCatch(
-    `updateInstance`,
-    async (instanceId: InstanceId, fields: Partial<InstancesRecord>) => {
-      await client.collection('instances').update(instanceId, fields)
-    }
-  )
-
-  const updateInstanceStatus = safeCatch(
-    `updateInstanceStatus`,
-    async (instanceId: InstanceId, status: InstanceStatus) => {
-      await updateInstance(instanceId, { status })
-    }
-  )
-
-  const createInvocation = safeCatch(
-    `createInvocation`,
-    async (instance: InstancesRecord, pid: number) => {
-      const init: Partial<InvocationRecord> = {
-        startedAt: pocketNow(),
-        pid,
-        instanceId: instance.id,
-        totalSeconds: 0,
-      }
-      const _inv = await client
-        .collection('invocations')
-        .create<InvocationRecord>(init)
-      return _inv
-    }
-  )
-
-  const pingInvocation = safeCatch(
-    `pingInvocation`,
-    async (invocation: InvocationRecord) => {
-      const totalSeconds =
-        (+new Date() - Date.parse(invocation.startedAt)) / 1000
-      const toUpdate: Partial<InvocationRecord> = {
-        totalSeconds,
-      }
-      const _inv = await client
-        .collection('invocations')
-        .update<InvocationRecord>(invocation.id, toUpdate)
-      await updateInstanceSeconds(invocation.instanceId)
-      return _inv
-    }
-  )
-
-  const finalizeInvocation = safeCatch(
-    `finalizeInvocation`,
-    async (invocation: InvocationRecord) => {
-      dbg('finalizing')
-      const totalSeconds =
-        (+new Date() - Date.parse(invocation.startedAt)) / 1000
-      const toUpdate: Partial<InvocationRecord> = {
-        endedAt: pocketNow(),
-        totalSeconds,
-      }
-      dbg({ toUpdate })
-      const _inv = await client
-        .collection('invocations')
-        .update<InvocationRecord>(invocation.id, toUpdate)
-      await updateInstanceSeconds(invocation.instanceId)
-      return _inv
-    }
-  )
-
-  const updateInstanceSeconds = safeCatch(
-    `updateInstanceSeconds`,
-    async (instanceId: InstanceId, forPeriod = new Date()) => {
-      const startIso = startOfMonth(forPeriod).toISOString()
-      const endIso = endOfMonth(forPeriod).toISOString()
-      const query = rawDb('invocations')
-        .sum('totalSeconds as t')
-        .where('instanceId', instanceId)
-        .where('startedAt', '>=', startIso)
-        .where('startedAt', '<=', endIso)
-      dbg(query.toString())
-      const res = await query
-      const [row] = res
-      assertExists(row, `Expected row here`)
-      const secondsThisMonth = row.t
-      await updateInstance(instanceId, { secondsThisMonth })
-    }
-  )
-
   const applySchema = safeCatch(
     `applySchema`,
     async (collections: Collection_Serialized[]) => {
@@ -144,40 +43,22 @@ export const createPbClient = (url: string) => {
     }
   )
 
-  const updateInstances = safeCatch(
-    'updateInstances',
-    async (cb: (rec: InstancesRecord) => Partial<InstancesRecord>) => {
-      const res = await client
-        .collection('instances')
-        .getFullList<InstancesRecord>(200)
-      const limiter = new Bottleneck({ maxConcurrent: 1 })
-      const promises = reduce(
-        res,
-        (c, r) => {
-          c.push(
-            limiter.schedule(() => {
-              const toUpdate = cb(r)
-              dbg(`Updating instnace ${r.id} with ${JSON.stringify(toUpdate)}`)
-              return client.collection('instances').update(r.id, toUpdate)
-            })
-          )
-          return c
-        },
-        [] as Promise<void>[]
-      )
-      await Promise.all(promises)
-    }
-  )
+  const context: MixinContext = { client, rawDb }
+  const jobsApi = createJobMixin(context)
+  const instanceApi = createInstanceMixin(context)
+  const backupApi = createBackupMixin(context)
+  const invocationApi = createInvocationMixin(context, instanceApi)
 
-  return {
-    pingInvocation,
-    finalizeInvocation,
-    createInvocation,
+  const api = {
+    client,
+    knex: rawDb,
     adminAuthViaEmail,
-    getInstanceBySubdomain,
-    updateInstanceStatus,
-    updateInstance,
     applySchema,
-    updateInstances,
+    ...jobsApi,
+    ...instanceApi,
+    ...invocationApi,
+    ...backupApi,
   }
+
+  return api
 }
packages/daemon/src/migrate/applyDbMigrations.ts (new file, 48 lines)
@@ -0,0 +1,48 @@
import { binFor } from '@pockethost/common'
import {
  DAEMON_PB_PASSWORD,
  DAEMON_PB_PORT_BASE,
  DAEMON_PB_USERNAME,
  PUBLIC_PB_DOMAIN,
  PUBLIC_PB_PROTOCOL,
  PUBLIC_PB_SUBDOMAIN,
} from '../constants'
import { createPbClient, PocketbaseClientApi } from '../db/PbClient'
import { mkInternalUrl } from '../util/internal'
import { spawnInstance } from '../util/spawnInstance'
import { tryFetch } from '../util/tryFetch'
import { schema } from './schema'

export const applyDbMigrations = async (
  cb: (client: PocketbaseClientApi) => Promise<void>
) => {
  // Add `platform` and `bin` required columns (migrate db json)
  try {
    const mainProcess = await spawnInstance({
      subdomain: PUBLIC_PB_SUBDOMAIN,
      slug: PUBLIC_PB_SUBDOMAIN,
      port: DAEMON_PB_PORT_BASE,
      bin: binFor('lollipop'),
    })
    try {
      const coreInternalUrl = mkInternalUrl(DAEMON_PB_PORT_BASE)
      const client = createPbClient(coreInternalUrl)
      await tryFetch(coreInternalUrl)
      await client.adminAuthViaEmail(DAEMON_PB_USERNAME, DAEMON_PB_PASSWORD)
      await client.applySchema(schema)
      await cb(client)
    } catch (e) {
      console.error(
        `***WARNING*** CANNOT AUTHENTICATE TO ${PUBLIC_PB_PROTOCOL}://${PUBLIC_PB_SUBDOMAIN}.${PUBLIC_PB_DOMAIN}/_/`
      )
      console.error(
        `***WARNING*** LOG IN MANUALLY, ADJUST .env, AND RESTART DOCKER`
      )
    } finally {
      console.log(`Exiting process`)
      mainProcess.kill()
    }
  } catch (e) {
    console.error(`${e}`)
  }
}
@@ -1,65 +1,44 @@
 import { binFor, InstanceStatus } from '@pockethost/common'
-import { chdir } from 'process'
+import { renameSync } from 'fs'
+import { resolve } from 'path'
 import {
   DAEMON_PB_BIN_DIR,
   DAEMON_PB_DATA_DIR,
-  DAEMON_PB_PASSWORD,
-  DAEMON_PB_PORT_BASE,
-  DAEMON_PB_USERNAME,
-  PUBLIC_PB_DOMAIN,
-  PUBLIC_PB_PROTOCOL,
   PUBLIC_PB_SUBDOMAIN,
 } from '../constants'
-import { createPbClient } from '../db/PbClient'
-import { mkInternalUrl } from '../util/internal'
-import { tryFetch } from '../util/tryFetch'
-import { _spawn } from '../util/_spawn'
+import { backupInstance } from '../util/backupInstance'
+import { error } from '../util/dbg'
+import { applyDbMigrations } from './applyDbMigrations'
 import { pexec } from './pexec'
-import { schema } from './schema'
 
-const PB_BIN = `${DAEMON_PB_BIN_DIR}/${binFor('lollipop')}`
-const DATA_ROOT = `${DAEMON_PB_DATA_DIR}/${PUBLIC_PB_SUBDOMAIN}`
+const PB_BIN = resolve(DAEMON_PB_BIN_DIR, binFor('lollipop'))
 
 ;(async () => {
-  console.log(`Backing up`)
-  chdir(DATA_ROOT)
-  await pexec(`tar -czvf ${+new Date()}.tgz pb_data`)
+  await backupInstance(
+    PUBLIC_PB_SUBDOMAIN,
+    `${+new Date()}`,
+    async (progress) => {
+      console.log(progress)
+    }
+  )
 
   console.log(`Upgrading`)
   await pexec(`${PB_BIN} upgrade --dir=pb_data`)
 
-  // Add `platform` and `bin` required columns (migrate db json)
-  try {
-    const mainProcess = await _spawn({
-      subdomain: PUBLIC_PB_SUBDOMAIN,
-      port: DAEMON_PB_PORT_BASE,
-      bin: binFor('lollipop'),
-    })
-    try {
-      const coreInternalUrl = mkInternalUrl(DAEMON_PB_PORT_BASE)
-      const client = createPbClient(coreInternalUrl)
-      await tryFetch(coreInternalUrl)
-      await client.adminAuthViaEmail(DAEMON_PB_USERNAME, DAEMON_PB_PASSWORD)
-      await client.applySchema(schema)
-      await client.updateInstances((instance) => {
-        return {
-          status: instance.status || InstanceStatus.Idle,
-          platform: instance.platform || 'ermine',
-          version: instance.version || 'latest',
-        }
-      })
-    } catch (e) {
-      console.error(
-        `***WARNING*** CANNOT AUTHENTICATE TO ${PUBLIC_PB_PROTOCOL}://${PUBLIC_PB_SUBDOMAIN}.${PUBLIC_PB_DOMAIN}/_/`
-      )
-      console.error(
-        `***WARNING*** LOG IN MANUALLY, ADJUST .env, AND RESTART DOCKER`
-      )
-    } finally {
-      console.log(`Exiting process`)
-      mainProcess.kill()
-    }
-  } catch (e) {
-    console.error(`${e}`)
-  }
+  await applyDbMigrations(async (client) => {
+    await client.updateInstances((instance) => {
+      const src = resolve(DAEMON_PB_DATA_DIR, instance.subdomain)
+      const dst = resolve(DAEMON_PB_DATA_DIR, instance.id)
+      try {
+        renameSync(src, dst)
+      } catch (e) {
+        error(`${e}`)
+      }
+      return {
+        status: InstanceStatus.Idle,
+        platform: instance.platform || 'ermine',
+        version: instance.version || 'latest',
+      }
+    })
+  })
 })()
@@ -91,7 +91,7 @@ export const schema: Collection_Serialized[] = [
     ],
     listRule: 'uid=@request.auth.id',
     viewRule: 'uid = @request.auth.id',
-    createRule: "uid = @request.auth.id && (status = 'idle' || status = '')",
+    createRule: "uid = @request.auth.id && (status = 'idle')",
     updateRule: null,
     deleteRule: null,
     options: {},
@@ -227,4 +227,166 @@ export const schema: Collection_Serialized[] = [
     deleteRule: null,
     options: {},
   },
+  {
+    id: 'v7s41iokt1vizxd',
+    name: 'jobs',
+    type: 'base',
+    system: false,
+    schema: [
+      {
+        id: 'yv38czcf',
+        name: 'userId',
+        type: 'relation',
+        system: false,
+        required: true,
+        unique: false,
+        options: {
+          maxSelect: 1,
+          collectionId: 'systemprofiles0',
+          cascadeDelete: false,
+        },
+      },
+      {
+        id: 'tgvaxwfv',
+        name: 'payload',
+        type: 'json',
+        system: false,
+        required: true,
+        unique: false,
+        options: {},
+      },
+      {
+        id: 'zede8pci',
+        name: 'status',
+        type: 'text',
+        system: false,
+        required: true,
+        unique: false,
+        options: {
+          min: null,
+          max: null,
+          pattern: '',
+        },
+      },
+      {
+        id: 'feovwsbr',
+        name: 'message',
+        type: 'text',
+        system: false,
+        required: false,
+        unique: false,
+        options: {
+          min: null,
+          max: null,
+          pattern: '',
+        },
+      },
+    ],
+    listRule: 'userId = @request.auth.id',
+    viewRule: 'userId = @request.auth.id',
+    createRule: "userId = @request.auth.id && status='new'",
+    updateRule: null,
+    deleteRule: null,
+    options: {},
+  },
+  {
+    id: '72clb6v41bzsay9',
+    name: 'backups',
+    type: 'base',
+    system: false,
+    schema: [
+      {
+        id: 'someqtjw',
+        name: 'message',
+        type: 'text',
+        system: false,
+        required: false,
+        unique: false,
+        options: {
+          min: null,
+          max: null,
+          pattern: '',
+        },
+      },
+      {
+        id: 'jk4zwiaj',
+        name: 'instanceId',
+        type: 'relation',
+        system: false,
+        required: true,
+        unique: false,
+        options: {
+          maxSelect: 1,
+          collectionId: 'etae8tuiaxl6xfv',
+          cascadeDelete: false,
+        },
+      },
+      {
+        id: 'wsy3l5gm',
+        name: 'status',
+        type: 'text',
+        system: false,
+        required: true,
+        unique: false,
+        options: {
+          min: null,
+          max: null,
+          pattern: '',
+        },
+      },
+      {
+        id: 'gmkrc5d9',
+        name: 'bytes',
+        type: 'number',
+        system: false,
+        required: false,
+        unique: false,
+        options: {
+          min: null,
+          max: null,
+        },
+      },
+      {
+        id: '4lmammjz',
+        name: 'platform',
+        type: 'text',
+        system: false,
+        required: true,
+        unique: false,
+        options: {
+          min: null,
+          max: null,
+          pattern: '',
+        },
+      },
+      {
+        id: 'fheqxmbj',
+        name: 'version',
+        type: 'text',
+        system: false,
+        required: true,
+        unique: false,
+        options: {
+          min: null,
+          max: null,
+          pattern: '',
+        },
+      },
+      {
+        id: 'cinbmdwe',
+        name: 'progress',
+        type: 'json',
+        system: false,
+        required: false,
+        unique: false,
+        options: {},
+      },
+    ],
+    listRule: '@request.auth.id = instanceId.uid',
+    viewRule: null,
+    createRule: null,
+    updateRule: null,
+    deleteRule: null,
+    options: {},
+  },
 ]
@@ -1,7 +1,41 @@
-import { createProxyServer } from './ProxyServer'
-createProxyServer().then((api) => {
+import { binFor } from '@pockethost/common'
+import { DAEMON_PB_PORT_BASE, PUBLIC_PB_SUBDOMAIN } from './constants'
+import { createPbClient } from './db/PbClient'
+import { createBackupService } from './services/BackupService'
+import { createInstanceService } from './services/InstanceService'
+import { createJobService } from './services/JobService'
+import { createProxyService } from './services/ProxyService'
+import { mkInternalUrl } from './util/internal'
+import { spawnInstance } from './util/spawnInstance'
+
+// npm install eventsource --save
+global.EventSource = require('eventsource')
+;(async () => {
+  const coreInternalUrl = mkInternalUrl(DAEMON_PB_PORT_BASE)
+
+  /**
+   * Launch central database
+   */
+  const mainProcess = await spawnInstance({
+    subdomain: PUBLIC_PB_SUBDOMAIN,
+    slug: PUBLIC_PB_SUBDOMAIN,
+    port: DAEMON_PB_PORT_BASE,
+    bin: binFor('lollipop'),
+  })
+
+  /**
+   * Launch services
+   */
+  const client = createPbClient(coreInternalUrl)
+  const instanceService = await createInstanceService(client)
+  const proxyService = await createProxyService(instanceService)
+  const jobService = await createJobService(client)
+  const backupService = await createBackupService(client, jobService)
+
   process.once('SIGUSR2', async () => {
     console.log(`SIGUSR2 detected`)
-    api.shutdown()
+    proxyService.shutdown()
+    instanceService.shutdown()
+    jobService.shutdown()
+    backupService.shutdown()
   })
+})()
packages/daemon/src/services/BackupService.ts (new file, 105 lines)
@@ -0,0 +1,105 @@
import {
  assertTruthy,
  BackupStatus,
  createTimerManager,
  InstanceBackupJobPayload,
  InstanceRestoreJobPayload,
  JobCommands,
} from '@pockethost/common'
import { PocketbaseClientApi } from '../db/PbClient'
import { backupInstance } from '../util/backupInstance'
import { dbg } from '../util/dbg'
import { JobServiceApi } from './JobService'

export const createBackupService = async (
  client: PocketbaseClientApi,
  jobService: JobServiceApi
) => {
  jobService.registerCommand<InstanceBackupJobPayload>(
    JobCommands.BackupInstance,
    async (unsafeJob) => {
      const unsafePayload = unsafeJob.payload
      const { instanceId } = unsafePayload
      assertTruthy(instanceId, `Expected instanceId here`)
      const instance = await client.getInstance(instanceId)
      assertTruthy(instance, `Instance ${instanceId} not found`)
      assertTruthy(
        instance.uid === unsafeJob.userId,
        `Instance ${instanceId} is not owned by user ${unsafeJob.userId}`
      )
      await client.createBackup(instance.id)
    }
  )

  jobService.registerCommand<InstanceRestoreJobPayload>(
    JobCommands.RestoreInstance,
    async (unsafeJob) => {
      const unsafePayload = unsafeJob.payload
      const { backupId } = unsafePayload
      assertTruthy(backupId, `Expected backupId here`)
      const backup = await client.getBackupJob(backupId)
      assertTruthy(backup, `Backup ${backupId} not found`)
      const instance = await client.getInstance(backup.instanceId)
      assertTruthy(instance, `Instance ${backup.instanceId} not found`)
      assertTruthy(
        instance.uid === unsafeJob.userId,
        `Backup ${backupId} is not owned by user ${unsafeJob.userId}`
      )

      /**
       * Restore strategy:
       *
       * 1. Place instance in maintenance mode
       * 2. Shut down instance
       * 3. Back up
       * 4. Restore
       * 5. Lift maintenance mode
       */
      await client.createBackup(instance.id)
    }
  )

  const tm = createTimerManager({})
  tm.repeat(async () => {
    const backupRec = await client.getNextBackupJob()
    if (!backupRec) {
      dbg(`No backups requested`)
      return true
    }
    const instance = await client.getInstance(backupRec.instanceId)
    try {
      await client.updateBackup(backupRec.id, {
        status: BackupStatus.Running,
      })
      let progress = backupRec.progress || {}
      const bytes = await backupInstance(
        instance.id,
        backupRec.id,
        (_progress) => {
          progress = { ...progress, ..._progress }
          dbg(_progress)
          return client.updateBackup(backupRec.id, {
            progress,
          })
        }
      )
      await client.updateBackup(backupRec.id, {
        bytes,
        status: BackupStatus.FinishedSuccess,
      })
    } catch (e) {
      await client.updateBackup(backupRec.id, {
        status: BackupStatus.FinishedError,
        message: `${e}`,
      })
    }
    return true
  }, 1000)

  const shutdown = () => {
    tm.shutdown()
  }
  return {
    shutdown,
  }
}
@ -1,7 +1,14 @@
|
|||||||
import { assertTruthy, binFor, InstanceStatus } from '@pockethost/common'
|
import {
|
||||||
|
assertTruthy,
|
||||||
|
binFor,
|
||||||
|
createTimerManager,
|
||||||
|
InstanceId,
|
||||||
|
InstanceStatus,
|
||||||
|
} from '@pockethost/common'
|
||||||
import { forEachRight, map } from '@s-libs/micro-dash'
|
import { forEachRight, map } from '@s-libs/micro-dash'
|
||||||
import Bottleneck from 'bottleneck'
|
import Bottleneck from 'bottleneck'
|
||||||
import getPort from 'get-port'
|
import getPort from 'get-port'
|
||||||
|
import { AsyncReturnType } from 'type-fest'
|
||||||
import {
|
import {
|
||||||
DAEMON_PB_IDLE_TTL,
|
DAEMON_PB_IDLE_TTL,
|
||||||
DAEMON_PB_PASSWORD,
|
DAEMON_PB_PASSWORD,
|
||||||
@ -12,16 +19,15 @@ import {
|
|||||||
PUBLIC_PB_DOMAIN,
|
PUBLIC_PB_DOMAIN,
|
||||||
PUBLIC_PB_PROTOCOL,
|
PUBLIC_PB_PROTOCOL,
|
||||||
PUBLIC_PB_SUBDOMAIN,
|
PUBLIC_PB_SUBDOMAIN,
|
||||||
} from './constants'
|
} from '../constants'
|
||||||
import { createPbClient } from './db/PbClient'
|
import { PocketbaseClientApi } from '../db/PbClient'
|
||||||
import { dbg, error } from './util/dbg'
|
import { dbg, error } from '../util/dbg'
|
||||||
import { mkInternalUrl } from './util/internal'
|
import { mkInternalUrl } from '../util/internal'
|
||||||
import { now } from './util/now'
|
import { now } from '../util/now'
|
||||||
import { safeCatch } from './util/safeAsync'
|
import { safeCatch } from '../util/safeAsync'
|
||||||
import { tryFetch } from './util/tryFetch'
|
import { PocketbaseProcess, spawnInstance } from '../util/spawnInstance'
|
||||||
import { PocketbaseProcess, _spawn } from './util/_spawn'
|
|
||||||
|
|
||||||
type Instance = {
|
type InstanceApi = {
|
||||||
process: PocketbaseProcess
|
process: PocketbaseProcess
|
||||||
internalUrl: string
|
internalUrl: string
|
||||||
port: number
|
port: number
|
||||||
@ -29,27 +35,10 @@ type Instance = {
|
|||||||
startRequest: () => () => void
|
startRequest: () => () => void
|
||||||
}
|
}
|
||||||
|
|
||||||
export const createInstanceManger = async () => {
|
export type InstanceServiceApi = AsyncReturnType<typeof createInstanceService>
|
||||||
const instances: { [_: string]: Instance } = {}
|
export const createInstanceService = async (client: PocketbaseClientApi) => {
|
||||||
|
const instances: { [_: string]: InstanceApi } = {}
|
||||||
|
|
||||||
const coreInternalUrl = mkInternalUrl(DAEMON_PB_PORT_BASE)
|
|
||||||
const client = createPbClient(coreInternalUrl)
|
|
||||||
const mainProcess = await _spawn({
|
|
||||||
subdomain: PUBLIC_PB_SUBDOMAIN,
|
|
||||||
port: DAEMON_PB_PORT_BASE,
|
|
||||||
bin: binFor('lollipop'),
|
|
||||||
})
|
|
||||||
instances[PUBLIC_PB_SUBDOMAIN] = {
|
|
||||||
process: mainProcess,
|
|
||||||
        internalUrl: coreInternalUrl,
        port: DAEMON_PB_PORT_BASE,
        shutdown: async () => {
          dbg(`Shutting down instance ${PUBLIC_PB_SUBDOMAIN}`)
          mainProcess.kill()
        },
        startRequest: () => () => {},
      }
      await tryFetch(coreInternalUrl)
      try {
        await client.adminAuthViaEmail(DAEMON_PB_USERNAME, DAEMON_PB_PASSWORD)
      } catch (e) {
@ -100,8 +89,9 @@ export const createInstanceManger = async () => {
        await client.updateInstanceStatus(instance.id, InstanceStatus.Starting)

-       const childProcess = await _spawn({
+       const childProcess = await spawnInstance({
          subdomain,
+         slug: instance.id,
          port: newPort,
          bin: binFor(instance.platform, instance.version),
          onUnexpectedStop: (code) => {
@ -113,30 +103,28 @@ export const createInstanceManger = async () => {
        assertTruthy(pid, `Expected PID here but got ${pid}`)

        const invocation = await client.createInvocation(instance, pid)
-       const api: Instance = (() => {
+       const tm = createTimerManager({})
+       const api: InstanceApi = (() => {
          let openRequestCount = 0
          let lastRequest = now()
-         let tid: ReturnType<typeof setTimeout>
          const internalUrl = mkInternalUrl(newPort)

          const RECHECK_TTL = 1000 // 1 second
-         const _api: Instance = {
+         const _api: InstanceApi = {
            process: childProcess,
            internalUrl,
            port: newPort,
            shutdown: safeCatch(
              `Instance ${subdomain} invocation ${invocation.id} pid ${pid} shutdown`,
              async () => {
-               clearTimeout(tid)
+               tm.shutdown()
                await client.finalizeInvocation(invocation)
                const res = childProcess.kill()
                delete instances[subdomain]
-               if (subdomain !== PUBLIC_PB_SUBDOMAIN) {
                await client.updateInstanceStatus(
                  instance.id,
                  InstanceStatus.Idle
                )
-               }
                assertTruthy(
                  res,
                  `Expected child process to exit gracefully but got ${res}`
@ -156,24 +144,36 @@ export const createInstanceManger = async () => {
          }

          {
-           /**
-            * Heartbeat and idle shutdown
-            */
-           const _beat = async () => {
-             dbg(`${subdomain} heartbeat: ${openRequestCount} open requests`)
-             await client.pingInvocation(invocation)
+           tm.repeat(
+             safeCatch(`idleCheck`, async () => {
+               dbg(`${subdomain} idle check: ${openRequestCount} open requests`)
                if (
                  openRequestCount === 0 &&
                  lastRequest + DAEMON_PB_IDLE_TTL < now()
                ) {
-                 dbg(`${subdomain} idle for ${DAEMON_PB_IDLE_TTL}, shutting down`)
+                 dbg(
+                   `${subdomain} idle for ${DAEMON_PB_IDLE_TTL}, shutting down`
+                 )
                  await _api.shutdown()
+                 return false
                } else {
                  dbg(`${openRequestCount} requests remain open on ${subdomain}`)
-                 tid = setTimeout(_beat, RECHECK_TTL)
                }
+               return true
+             }),
+             RECHECK_TTL
+           )
          }
-         _beat()
+         {
+           tm.repeat(
+             safeCatch(`uptime`, async () => {
+               dbg(`${subdomain} uptime`)
+               await client.pingInvocation(invocation)
+               return true
+             }),
+             1000
+           )
+         }

          return _api
@ -191,5 +191,7 @@ export const createInstanceManger = async () => {
      instance.shutdown()
    })
  }
-   return { getInstance, shutdown }
+   const maintenance = async (instanceId: InstanceId) => {}
+   return { getInstance, shutdown, maintenance }
}
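The refactor above swaps the hand-rolled `setTimeout` heartbeat for `createTimerManager`, whose `repeat(cb, ms)` callbacks appear to re-arm only while they resolve `true`. A minimal sketch of that contract, using hypothetical stand-in names rather than the real daemon helper:

```ts
// Minimal sketch, assuming repeat() stops once the callback returns false.
// createTimerManagerSketch is a hypothetical stand-in, not the real helper.
const createTimerManagerSketch = () => {
  const timers = new Set<ReturnType<typeof setTimeout>>()
  const repeat = (cb: () => Promise<boolean>, ms: number) => {
    const tick = async () => {
      const again = await cb() // false means "do not re-arm"
      if (again) timers.add(setTimeout(tick, ms))
    }
    timers.add(setTimeout(tick, ms))
  }
  const shutdown = () => timers.forEach((t) => clearTimeout(t))
  return { repeat, shutdown }
}

// Usage mirroring the idle check above: stop repeating after shutdown.
const tm = createTimerManagerSketch()
tm.repeat(async () => {
  const idle = true // placeholder for openRequestCount === 0 && TTL expired
  if (idle) {
    // await _api.shutdown()
    return false
  }
  return true
}, 1000)
```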
packages/daemon/src/services/JobService.ts (new file, 84 lines)
@ -0,0 +1,84 @@
import {
  assertTruthy,
  JobCommands,
  JobPayloadBase,
  JobRecord,
  JobStatus,
} from '@pockethost/common'
import { isObject } from '@s-libs/micro-dash'
import Bottleneck from 'bottleneck'
import { default as knexFactory } from 'knex'
import pocketbaseEs from 'pocketbase'
import { AsyncReturnType } from 'type-fest'
import { PocketbaseClientApi } from '../db/PbClient'
import { error } from '../util/dbg'

export type JobServiceApi = AsyncReturnType<typeof createJobService>

export type KnexApi = ReturnType<typeof knexFactory>
export type CommandModuleInitializer = (
  register: JobServiceApi['registerCommand'],
  client: pocketbaseEs,
  knex: KnexApi
) => void

export type JobHandler<TPayload> = (
  unsafeJob: JobRecord<Partial<TPayload>>
) => Promise<void>

export const createJobService = async (client: PocketbaseClientApi) => {
  const limiter = new Bottleneck({ maxConcurrent: 1 })

  const jobHandlers: {
    [_ in JobCommands]?: JobHandler<any>
  } = {}

  const run = async (job: JobRecord<any>) =>
    limiter.schedule(async () => {
      try {
        await client.setJobStatus(job, JobStatus.Queued)
        const { payload } = job
        assertTruthy(isObject(payload), `Payload must be an object`)
        const unsafePayload = payload as Partial<JobPayloadBase>
        const { cmd } = unsafePayload
        assertTruthy(cmd, `Payload must contain command`)
        const handler = jobHandlers[cmd]
        if (!handler) {
          throw new Error(`Job handler ${cmd} is not registered`)
        }
        console.log(`Running job ${job.id}`, job)
        await client.setJobStatus(job, JobStatus.Running)
        await handler(job)
        await client.setJobStatus(job, JobStatus.FinishedSuccess)
      } catch (e) {
        await client.rejectJob(job, `${e}`).catch((e) => {
          error(`job ${job.id} failed to reject with ${e}`)
        })
      }
    })

  const unsub = await client.onNewJob(run)
  await client.resetJobs()
  await client.resetBackups()
  const jobs = await client.incompleteJobs()
  jobs.forEach(run)

  const shutdown = () => {
    unsub()
  }

  const registerCommand = <TPayload>(
    commandName: JobCommands,
    handler: JobHandler<TPayload>
  ) => {
    if (jobHandlers[commandName]) {
      throw new Error(`${commandName} job handler already registered.`)
    }
    jobHandlers[commandName] = handler
  }

  return {
    registerCommand,
    shutdown,
  }
}
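For context, a hedged sketch of how a command handler might be registered against this service. The payload shape and the handler body are assumptions; `JobCommands.BackupInstance` is referenced elsewhere in this commit by the frontend client.

```ts
// Hypothetical wiring sketch, not part of this diff.
type BackupPayload = { instanceId: string; backupId: string } // assumed shape

const jobService = await createJobService(client) // client: PocketbaseClientApi

jobService.registerCommand<BackupPayload>(JobCommands.BackupInstance, async (job) => {
  const { instanceId, backupId } = job.payload
  if (!instanceId || !backupId) throw new Error(`instanceId and backupId are required`)
  // ...perform the backup here, e.g. via the backupInstance helper added later
  // in this commit, and report progress back through the jobs collection...
})
```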
@ -1,12 +1,21 @@
  import { createServer } from 'http'
  import httpProxy from 'http-proxy'
- import { PUBLIC_APP_DOMAIN, PUBLIC_APP_PROTOCOL } from './constants'
- import { createInstanceManger } from './InstanceManager'
- import { dbg, info } from './util/dbg'
+ import { AsyncReturnType } from 'type-fest'
+ import {
+   DAEMON_PB_PORT_BASE,
+   PUBLIC_APP_DOMAIN,
+   PUBLIC_APP_PROTOCOL,
+   PUBLIC_PB_SUBDOMAIN,
+ } from '../constants'
+ import { dbg, info } from '../util/dbg'
+ import { mkInternalUrl } from '../util/internal'
+ import { InstanceServiceApi } from './InstanceService'

- export const createProxyServer = async () => {
-   const instanceManager = await createInstanceManger()
+ export type ProxyServiceApi = AsyncReturnType<typeof createProxyService>
+
+ export const createProxyService = async (
+   instanceManager: InstanceServiceApi
+ ) => {
    const proxy = httpProxy.createProxyServer({})

    const server = createServer(async (req, res) => {
@ -29,6 +38,13 @@ export const createProxyServer = async () => {
      throw new Error(`${host} has no subdomain.`)
    }
    try {
+     if (subdomain === PUBLIC_PB_SUBDOMAIN) {
+       const target = mkInternalUrl(DAEMON_PB_PORT_BASE)
+       dbg(`Forwarding proxy request for ${req.url} to instance ${target}`)
+       proxy.web(req, res, { target })
+       return
+     }
+
      const instance = await instanceManager.getInstance(subdomain)
      if (!instance) {
        throw new Error(
@ -36,9 +52,14 @@ export const createProxyServer = async () => {
        )
      }

+     if (req.closed) {
+       throw new Error(`Request already closed.`)
+     }
+
      dbg(
        `Forwarding proxy request for ${req.url} to instance ${instance.internalUrl}`
      )

      const endRequest = instance.startRequest()
      req.on('close', endRequest)
      proxy.web(req, res, { target: instance.internalUrl })
packages/daemon/src/util/backupInstance.ts (new file, 109 lines)
@ -0,0 +1,109 @@
import { BackupRecordId, InstanceId } from '@pockethost/common'
import { statSync } from 'fs'
import { basename, resolve } from 'path'
import { chdir, cwd } from 'process'
import { Database } from 'sqlite3'
import tmp from 'tmp'
import { DAEMON_PB_DATA_DIR } from '../constants'
import { pexec } from '../migrate/pexec'
import { dbg } from './dbg'
import { ensureDirExists } from './ensureDirExists'

export type BackupProgress = {
  current: number
  total: number
}

export type ProgressInfo = {
  [src: string]: number
}
export type ProgressCallback = (info: ProgressInfo) => Promise<void>

export const PB_DATA_DIR = `pb_data`

export const execBackup = (
  src: string,
  dst: string,
  progress?: ProgressCallback
) => {
  const db = new Database(src)
  const backup = db.backup(dst)
  return new Promise<void>((resolve, reject) => {
    const _work = async () => {
      if (backup.failed) {
        reject()
        return
      }
      if (backup.completed) {
        backup.finish()
        await progress?.({
          [basename(src)]: 1,
        })
        resolve()
        return
      }
      const pct =
        backup.remaining === -1 ? 0 : 1 - backup.remaining / backup.pageCount
      dbg(pct, backup.completed, backup.failed)
      await progress?.({
        [basename(src)]: pct,
      })
      if (backup.idle) {
        await new Promise<void>((resolve) => {
          backup.step(5, () => resolve())
        })
      }
      setTimeout(_work, 10)
    }
    _work()
  })
}

export const backupInstance = async (
  instanceId: InstanceId,
  backupId: BackupRecordId,
  progress?: ProgressCallback
) => {
  const dataRoot = resolve(DAEMON_PB_DATA_DIR, instanceId)
  const backupTgzRoot = resolve(dataRoot, 'backup')
  const backupTgzFile = resolve(backupTgzRoot, `${backupId}.tgz`)
  const tmpObj = tmp.dirSync({
    unsafeCleanup: true,
  })
  const backupTmpTargetRoot = resolve(tmpObj.name)
  console.log({
    instanceId,
    dataRoot,
    backupTgzRoot,
    backupTgzFile,
    backupTmpTargetRoot,
  })
  const _cwd = cwd()
  try {
    dbg(`Backing up ${dataRoot}`)
    chdir(dataRoot)
    ensureDirExists(backupTgzRoot)
    ensureDirExists(resolve(backupTmpTargetRoot, PB_DATA_DIR))
    await Promise.all([
      execBackup(
        `pb_data/data.db`,
        resolve(backupTmpTargetRoot, PB_DATA_DIR, `data.db`),
        progress
      ),
      execBackup(
        `pb_data/logs.db`,
        resolve(backupTmpTargetRoot, PB_DATA_DIR, `logs.db`),
        progress
      ),
    ])
    chdir(backupTmpTargetRoot)
    await pexec(`tar -czvf ${backupTgzFile} ${PB_DATA_DIR}`)
    const stats = statSync(backupTgzFile)
    const bytes = stats.size
    return bytes
  } finally {
    console.log(`Removing again ${backupTmpTargetRoot}`)
    tmpObj.removeCallback()
    chdir(_cwd)
  }
}
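A hedged usage example of the helper above: the ids are placeholders and the progress callback simply logs per-database completion, which matches the shape the Backup UI later in this commit consumes.

```ts
// Sketch only: ids are placeholder values cast to the branded id types.
const runExampleBackup = async () => {
  const bytes = await backupInstance(
    'instance123' as InstanceId,   // placeholder
    'backup456' as BackupRecordId, // placeholder
    async (info) => {
      for (const [src, pct] of Object.entries(info)) {
        console.log(`${src}: ${Math.round(pct * 100)}%`)
      }
    }
  )
  console.log(`Backup tarball size: ${bytes} bytes`)
}
```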
packages/daemon/src/util/ensureDirExists.ts (new file, 10 lines)
@ -0,0 +1,10 @@
import { mkdirSync } from 'fs'
import { dbg } from './dbg'

export const ensureDirExists = (path: string) => {
  try {
    mkdirSync(path)
  } catch (e) {
    dbg(`${e}`)
  }
}
@ -1,6 +1,6 @@
  import Bottleneck from 'bottleneck'
  import { ClientResponseError } from 'pocketbase'
- import { dbg } from './dbg'
+ import { dbg, error } from './dbg'

  const limiter = new Bottleneck({ maxConcurrent: 1 })

@ -35,30 +35,31 @@ export const safeCatch = <TIn extends any[], TOut>(
    const uuid = `${name}:${_c}`
    dbg(uuid, ...args)
    const tid = setTimeout(() => {
-     console.error(`ERROR: timeout waiting for ${uuid}`)
+     dbg(uuid, `WARNING: timeout waiting for ${uuid}`)
    }, 100)

    inside = uuid
    return cb(...args)
      .then((res) => {
-       dbg(`${name}:${_c} finished`)
+       dbg(uuid, `finished`)
        inside = ''
        clearTimeout(tid)
        return res
      })
      .catch((e: any) => {
        if (e instanceof ClientResponseError) {
-         console.error(`PocketBase API error ${e}`)
-         console.error(JSON.stringify(e.data, null, 2))
+         error(uuid, `PocketBase API error ${e}`)
+         error(uuid, JSON.stringify(e.data, null, 2))
          if (e.status === 400) {
-           console.error(
+           error(
+             uuid,
              `It looks like you don't have permission to make this request.`
            )
          }
        } else {
-         console.error(`${name} failed: ${e}`)
+         error(uuid, `failed: ${e}`)
        }
-       console.error(e)
+       error(uuid, e)
        throw e
      })
  }
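For readers unfamiliar with `safeCatch`: it wraps an async function, names the invocation for logging, warns if it runs long, and rethrows after reporting. A hedged usage sketch with placeholder names:

```ts
// Sketch only: the URL and function name are placeholders.
const fetchInstanceCount = safeCatch(
  `fetchInstanceCount`,
  async (userId: string) => {
    const res = await fetch(`https://example.test/api/users/${userId}/instances`)
    const items = (await res.json()) as unknown[]
    return items.length
  }
)

// Callers keep the original signature: const n = await fetchInstanceCount('abc123')
```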
@ -5,15 +5,18 @@ import { DAEMON_PB_BIN_DIR, DAEMON_PB_DATA_DIR } from '../constants'
  import { dbg } from './dbg'
  import { mkInternalAddress, mkInternalUrl } from './internal'
  import { tryFetch } from './tryFetch'
- export type PocketbaseProcess = AsyncReturnType<typeof _spawn>
+ export type PocketbaseProcess = AsyncReturnType<typeof spawnInstance>

- export const _spawn = async (cfg: {
+ export type Config = {
    subdomain: string
+   slug: string
    port: number
    bin: string
    onUnexpectedStop?: (code: number | null) => void
- }) => {
-   const { subdomain, port, bin, onUnexpectedStop } = cfg
+ }
+
+ export const spawnInstance = async (cfg: Config) => {
+   const { subdomain, port, bin, onUnexpectedStop, slug } = cfg
    const cmd = `${DAEMON_PB_BIN_DIR}/${bin}`
    if (!existsSync(cmd)) {
      throw new Error(
@ -24,7 +27,7 @@ export const _spawn = async (cfg: {
    const args = [
      `serve`,
      `--dir`,
-     `${DAEMON_PB_DATA_DIR}/${subdomain}/pb_data`,
+     `${DAEMON_PB_DATA_DIR}/${slug}/pb_data`,
      `--http`,
      mkInternalAddress(port),
    ]
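A hedged call-site sketch for the renamed `spawnInstance`. The data directory is now keyed by `slug` (the instance id) rather than the subdomain, presumably so the on-disk path stays stable; the concrete values below are placeholders.

```ts
// Sketch only: all values are placeholders, not real configuration.
const proc = await spawnInstance({
  subdomain: 'my-app',                 // public name, used for logging
  slug: 'abc123',                      // instance id; selects `${DAEMON_PB_DATA_DIR}/abc123/pb_data`
  port: 8091,
  bin: 'pocketbase-0.8.0',             // assumed binary name under DAEMON_PB_BIN_DIR
  onUnexpectedStop: (code) => console.error(`exited with ${code}`),
})
```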
@ -1,4 +1,3 @@
- import fetch from 'node-fetch'
  import { dbg, error } from './dbg'

  export const tryFetch = (url: string) =>
@ -3,4 +3,4 @@ PUBLIC_APP_DOMAIN = localhost
  PUBLIC_PB_PROTOCOL=https
  PUBLIC_PB_SUBDOMAIN = pockethost-central
  PUBLIC_PB_DOMAIN = pockethost.io
- PUBLIC_POCKETHOST_VERSION=0.4.1
+ PUBLIC_POCKETHOST_VERSION=0.5.0
@ -21,17 +21,20 @@
    "svelte-check": "^2.7.1",
    "svelte-preprocess": "^4.10.6",
    "tslib": "^2.3.1",
-   "typescript": "^4.7.4",
+   "typescript": "^4.8.0",
    "vite": "^3.1.0"
  },
  "type": "module",
  "dependencies": {
    "@pockethost/common": "0.0.1",
-   "@s-libs/micro-dash": "12",
+   "@s-libs/micro-dash": "^14.1.0",
    "@types/bootstrap": "^5.2.6",
    "@types/js-cookie": "^3.0.2",
+   "async-mutex": "^0.4.0",
+   "date-fns": "^2.29.3",
    "js-cookie": "^3.0.1",
    "pocketbase": "^0.8.0-rc1",
+   "pretty-bytes": "^6.0.0",
    "random-word-slugs": "^0.1.6",
    "sass": "^1.54.9",
    "svelte-highlight": "^6.2.1"
@ -1,10 +1,10 @@
  <script lang="ts">
    import MediaQuery from '$components/MediaQuery.svelte'
    import ThemeToggle from '$components/ThemeToggle.svelte'
+   import { PUBLIC_POCKETHOST_VERSION } from '$src/env'
    import { handleLogoutAndRedirect } from '$util/database'
    import { isUserLoggedIn } from '$util/stores'
    import AuthStateGuard from './helpers/AuthStateGuard.svelte'
-   import { PUBLIC_POCKETHOST_VERSION } from '$src/env'
  </script>

  <header class="container-fluid">
@ -85,7 +85,7 @@
    href="https://github.com/benallfree/pockethost/discussions"
    class="nav-link btn btn-outline-dark rounded-1 d-inline-block px-3"
    target="_blank"
-   rel="noreferrer">Discussions</a
+   rel="noreferrer">Support</a
  >
  </li>
@ -1,15 +1,28 @@
  import { createGenericSyncEvent } from '$util/events'
  import {
    assertExists,
-   createRealtimeSubscriptionManager,
+   JobCommands,
+   JobStatus,
+   type BackupRecord,
+   type BackupRecordId,
+   type InstanceBackupJobPayload,
+   type InstanceBackupJobRecord,
    type InstanceId,
+   type InstanceRestoreJobPayload,
    type InstancesRecord,
    type InstancesRecord_New,
-   type RealtimeEventHandler,
+   type JobRecord,
+   type JobRecord_In,
    type UserRecord
  } from '@pockethost/common'
  import { keys, map } from '@s-libs/micro-dash'
- import PocketBase, { Admin, BaseAuthStore, ClientResponseError, Record } from 'pocketbase'
+ import PocketBase, {
+   Admin,
+   BaseAuthStore,
+   ClientResponseError,
+   Record,
+   type RecordSubscription
+ } from 'pocketbase'
  import type { Unsubscriber } from 'svelte/store'
  import { safeCatch } from '../util/safeCatch'

@ -26,6 +39,10 @@ export type PocketbaseClientApi = ReturnType<typeof createPocketbaseClient>

  export const createPocketbaseClient = (url: string) => {
    const client = new PocketBase(url)
+   client.beforeSend = (url, reqConfig) => {
+     delete reqConfig.signal
+     return reqConfig
+   }
+
    const { authStore } = client

@ -99,18 +116,32 @@ export const createPocketbaseClient = (url: string) => {
      client.collection('instances').getOne<InstancesRecord>(id)
    )

-   const { subscribeOne } = createRealtimeSubscriptionManager(client)
-
-   const watchInstanceById = (
+   const watchInstanceById = async (
      id: InstanceId,
-     cb: RealtimeEventHandler<InstancesRecord>
-   ): Unsubscriber => {
+     cb: (data: RecordSubscription<InstancesRecord>) => void
+   ): Promise<Unsubscriber> => {
      getInstanceById(id).then((record) => {
        // console.log(`Got instnace`, record)
        assertExists(record, `Expected instance ${id} here`)
        cb({ action: 'init', record })
      })
-     return subscribeOne('instances', id, cb)
+     return client.collection('instances').subscribe<InstancesRecord>(id, cb)
+   }
+
+   const watchBackupsByInstanceId = async (
+     id: InstanceId,
+     cb: (data: RecordSubscription<BackupRecord>) => void
+   ): Promise<Unsubscriber> => {
+     const unsub = client.collection('backups').subscribe<BackupRecord>('*', (e) => {
+       // console.log(e.record.instanceId, id)
+       if (e.record.instanceId !== id) return
+       cb(e)
+     })
+     const existingBackups = await client
+       .collection('backups')
+       .getFullList<BackupRecord>(100, { filter: `instanceId = '${id}'` })
+     existingBackups.forEach((record) => cb({ action: 'init', record }))
+     return unsub
    }

    const getAllInstancesById = safeCatch(`getAllInstancesById`, async () =>
@ -142,7 +173,7 @@ export const createPocketbaseClient = (url: string) => {

    const getAuthStoreProps = (): AuthStoreProps => {
      const { token, model, isValid } = client.authStore as AuthStoreProps
-     // console.log(`curent authstore`, { token, model, isValid })
+     // console.log(`current authStore`, { token, model, isValid })
      if (model instanceof Admin) throw new Error(`Admin models not supported`)
      if (model && !model.email) throw new Error(`Expected model to be a user here`)
      return {
@ -211,6 +242,49 @@ export const createPocketbaseClient = (url: string) => {
      })
    }

+   const createInstanceBackupJob = safeCatch(
+     `createInstanceBackupJob`,
+     async (instanceId: InstanceId) => {
+       const _user = user()
+       assertExists(_user, `Expected user to exist here`)
+       const { id: userId } = _user
+       const job: JobRecord_In<InstanceBackupJobPayload> = {
+         userId,
+         status: JobStatus.New,
+         payload: {
+           cmd: JobCommands.BackupInstance,
+           instanceId
+         }
+       }
+       const rec = await client.collection('jobs').create<InstanceBackupJobRecord>(job)
+       return rec
+     }
+   )
+
+   const createInstanceRestoreJob = safeCatch(
+     `createInstanceRestoreJob`,
+     async (backupId: BackupRecordId) => {
+       const _user = user()
+       assertExists(_user, `Expected user to exist here`)
+       const { id: userId } = _user
+       const job: JobRecord_In<InstanceRestoreJobPayload> = {
+         userId,
+         status: JobStatus.New,
+         payload: {
+           cmd: JobCommands.RestoreInstance,
+           backupId
+         }
+       }
+       const rec = await client.collection('jobs').create<JobRecord<InstanceRestoreJobPayload>>(job)
+       return rec
+     }
+   )
+
+   const [onJobUpdated, fireJobUpdated] =
+     createGenericSyncEvent<RecordSubscription<JobRecord<any>>>()
+
+   client.collection('jobs').subscribe<JobRecord<any>>('*', fireJobUpdated)
+
    return {
      getAuthStoreProps,
      parseError,
@ -227,6 +301,10 @@ export const createPocketbaseClient = (url: string) => {
      user,
      watchInstanceById,
      getAllInstancesById,
-     resendVerificationEmail
+     resendVerificationEmail,
+     watchBackupsByInstanceId,
+     onJobUpdated,
+     createInstanceBackupJob,
+     createInstanceRestoreJob
    }
  }
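A hedged sketch of the new client surface from the UI side, tying the pieces above together: watch an instance's backups (an `init` event per existing record, then realtime updates) and enqueue a backup job. The id is a placeholder and error handling is omitted.

```ts
// Sketch only: instanceId is a placeholder; client() comes from '$src/pocketbase'.
const demoBackupFlow = async (instanceId: InstanceId) => {
  const { watchBackupsByInstanceId, createInstanceBackupJob } = client()

  const unsubscribe = await watchBackupsByInstanceId(instanceId, ({ action, record }) => {
    console.log(`backup ${record.id} (${action}): ${record.status}`)
  })

  await createInstanceBackupJob(instanceId) // the daemon's JobService picks this up

  return unsubscribe // caller invokes this on teardown
}
```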
New file (37 lines): Svelte layout wrapping the per-instance pages
@ -0,0 +1,37 @@
<script lang="ts">
  import { page } from '$app/stores'
  import AuthStateGuard from '$components/helpers/AuthStateGuard.svelte'
  import { client } from '$src/pocketbase'
  import { createCleanupManagerSync } from '$util/CleanupManager'
  import { assertExists } from '@pockethost/common/src/assert'
  import { onDestroy, onMount } from 'svelte'
  import { instance } from './store'

  const { instanceId } = $page.params

  const cm = createCleanupManagerSync()
  onMount(async () => {
    const { watchInstanceById } = client()
    watchInstanceById(instanceId, (r) => {
      console.log(`Handling instance update`, r)
      const { action, record } = r
      assertExists(record, `Expected instance here`)
      instance.set(record)
    }).then(cm.add)
  })
  onDestroy(cm.cleanupAll)
</script>

<AuthStateGuard>
  <div class="container">
    {#if $instance}
      <h2>{$instance.subdomain}</h2>
      <slot />
      <div class="text-center py-5">
        <a href="/dashboard" class="btn btn-light"
          ><i class="bi bi-arrow-left-short" /> Back to Dashboard</a
        >
      </div>
    {/if}
  </div>
</AuthStateGuard>
Deleted file (69 lines): the previous single-page instance view
@ -1,69 +0,0 @@
The removed page subscribed to watchInstanceById for the routed instanceId, kept the record in a local variable, and rendered the Admin URL, the ProvisioningStatus badge, a JavaScript CodeSample (`new PocketBase(url)`), the running platform/version line, and a "Back to Dashboard" link. That markup now lives in the new layout above and the Overview and Code components below.
New file (22 lines): instance details page composing the new components
@ -0,0 +1,22 @@
<script lang="ts">
  import { PUBLIC_PB_PROTOCOL } from '$env/static/public'
  import { PUBLIC_PB_DOMAIN } from '$src/env'
  import { instance } from '../store'
  import Backup from './Backup.svelte'
  import Code from './Code.svelte'
  import Overview from './Overview.svelte'
  import Restore from './Restore.svelte'

  const { subdomain, status, platform, version } = $instance
  const url = `${PUBLIC_PB_PROTOCOL}://${subdomain}.${PUBLIC_PB_DOMAIN}`
  const code = `const url = '${url}'\nconst client = new PocketBase(url)`
</script>

<svelte:head>
  <title>{subdomain} details - PocketHost</title>
</svelte:head>

<Overview />
<Code />
<Backup />
<Restore />
New file (108 lines): Backup.svelte
@ -0,0 +1,108 @@
<script lang="ts">
  import AlertBar from '$components/AlertBar.svelte'
  import { client } from '$src/pocketbase'
  import { createCleanupManagerSync } from '$util/CleanupManager'
  import {
    BackupStatus,
    type BackupRecord,
    type BackupRecordId,
    type RecordId
  } from '@pockethost/common'
  import { reduce, sortBy } from '@s-libs/micro-dash'
  import { formatDistanceToNow } from 'date-fns'
  import prettyBytes from 'pretty-bytes'
  import { onDestroy, onMount } from 'svelte'
  import { writable } from 'svelte/store'
  import { instance } from '../store'

  const cm = createCleanupManagerSync()
  const backups = writable<BackupRecord[]>([])
  let isBackingUp = false
  onMount(async () => {
    const { watchBackupsByInstanceId } = client()
    watchBackupsByInstanceId($instance.id, (r) => {
      // console.log(`Handling backup update`, r)
      const { action, record } = r
      const _backups = reduce(
        $backups,
        (c, b) => {
          c[b.id] = b
          return c
        },
        {} as { [_: RecordId]: BackupRecord }
      )
      _backups[record.id] = record

      isBackingUp = false
      backups.set(
        sortBy(_backups, (e) => {
          isBackingUp ||=
            e.status !== BackupStatus.FinishedError && e.status !== BackupStatus.FinishedSuccess
          return Date.parse(e.created)
        }).reverse()
      )
      // console.log(record.id)
    }).then(cm.add)
  })
  onDestroy(cm.cleanupAll)

  const startBackup = () => {
    const { createInstanceBackupJob } = client()
    createInstanceBackupJob($instance.id)
  }

  const restoreBackup = (backupId: BackupRecordId) => {
    client().createInstanceRestoreJob(backupId)
  }
</script>

<div class="py-4">
  <h2>Backup</h2>

  <div class="text-center py-5">
    <button class="btn btn-light" on:click={() => startBackup()} disabled={isBackingUp}>
      <i class="bi bi-safe" /> Backup Now
    </button>
  </div>

  <div>
    {#each $backups as { id, bytes, updated, platform, version, status, message, progress }}
      <div>
        {#if status === BackupStatus.FinishedSuccess}
          <div class="text-success">
            {platform}:{version} ({prettyBytes(bytes)}) - Finished {new Date(updated)}
          </div>
        {/if}
        {#if status === BackupStatus.FinishedError}
          <div class="text-danger">
            {platform}:{version} - Finished {new Date(updated)}
            <AlertBar icon="bi bi-exclamation-triangle-fill" text={message} />
          </div>
        {/if}
        {#if status !== BackupStatus.FinishedError && status !== BackupStatus.FinishedSuccess}
          <div class="text-warning">
            {platform}:{version}
            {status}
            {#each Object.entries(progress || {}) as [src, pct]}
              <div class="badge bg-secondary" style="margin-right: 3px">
                {src}
                <code>
                  {Math.ceil(pct * 100)}%
                </code>
              </div>
            {/each}
            Started {formatDistanceToNow(Date.parse(updated))} ago
          </div>
        {/if}
      </div>
    {/each}
  </div>
</div>

<style lang="scss">
  code {
    width: 30px;
    text-align: right;
    display: inline-block;
  }
</style>
New file (18 lines): Code.svelte
@ -0,0 +1,18 @@
<script lang="ts">
  import CodeSample from '$components/CodeSample.svelte'
  import { PUBLIC_PB_PROTOCOL } from '$env/static/public'
  import { PUBLIC_PB_DOMAIN } from '$src/env'
  import { instance } from '../store'

  const { subdomain, status, platform, version } = $instance
  const url = `${PUBLIC_PB_PROTOCOL}://${subdomain}.${PUBLIC_PB_DOMAIN}`
  const code = `const url = '${url}'\nconst client = new PocketBase(url)`
</script>

<div class="py-4">
  <h2>Code Samples</h2>
  <div>
    JavaScript:
    <CodeSample {code} />
  </div>
</div>
New file (22 lines): Overview.svelte
@ -0,0 +1,22 @@
<script lang="ts">
  import ProvisioningStatus from '$components/ProvisioningStatus.svelte'
  import { PUBLIC_PB_PROTOCOL } from '$env/static/public'
  import { PUBLIC_PB_DOMAIN } from '$src/env'
  import { humanVersion } from '@pockethost/common'
  import { instance } from '../store'

  const { subdomain, status, platform, version } = $instance
  const url = `${PUBLIC_PB_PROTOCOL}://${subdomain}.${PUBLIC_PB_DOMAIN}`
</script>

<div class="py-4">
  <h2>Overview</h2>
  <ProvisioningStatus {status} />
  <div>
    Running {platform}
    {humanVersion(platform, version)}
  </div>
  <div>
    Admin URL: <a href={`${url}/_`} target="_blank">{`${url}/_`}</a>
  </div>
</div>
New file (90 lines): Restore.svelte
@ -0,0 +1,90 @@
<script lang="ts">
  import { PUBLIC_APP_DOMAIN } from '$env/static/public'
  import { client } from '$src/pocketbase'
  import { createCleanupManagerSync } from '$util/CleanupManager'
  import { BackupStatus, type BackupRecord, type RecordId } from '@pockethost/common'
  import { reduce, sortBy } from '@s-libs/micro-dash'
  import prettyBytes from 'pretty-bytes'
  import { onDestroy, onMount } from 'svelte'
  import { writable } from 'svelte/store'
  import { instance } from '../store'

  const cm = createCleanupManagerSync()
  const backups = writable<BackupRecord[]>([])
  let isBackingUp = false
  onMount(async () => {
    const { watchBackupsByInstanceId } = client()
    watchBackupsByInstanceId($instance.id, (r) => {
      // console.log(`Handling backup update`, r)
      const { action, record } = r
      const _backups = reduce(
        $backups,
        (c, b) => {
          c[b.id] = b
          return c
        },
        {} as { [_: RecordId]: BackupRecord }
      )
      _backups[record.id] = record

      isBackingUp = false
      backups.set(
        sortBy(_backups, (e) => {
          isBackingUp ||=
            e.status !== BackupStatus.FinishedError && e.status !== BackupStatus.FinishedSuccess
          return Date.parse(e.created)
        }).reverse()
      )
      // console.log(record.id)
    }).then(cm.add)
  })
  onDestroy(cm.cleanupAll)

  const startRestore = () => {
    const { createInstanceBackupJob } = client()
    createInstanceBackupJob($instance.id)
  }

  let sourceBackupId = ''
</script>

<div class="py-4">
  <h2>Restore</h2>

  {#if PUBLIC_APP_DOMAIN.toString().endsWith('.io')}
    Contact support to perform a restore.
  {/if}
  {#if PUBLIC_APP_DOMAIN.toString().endsWith('.test')}
    {#if $backups.length === 0}
      You must create a backup first.
    {/if}
    {#if $backups.length > 0}
      <select value={sourceBackupId}>
        <option value=""> -- choose snapshot -- </option>
        {#each $backups as { id, bytes, updated, platform, version, status, message, progress }}
          {#if status === BackupStatus.FinishedSuccess}
            <option value={id}>
              {platform}:{version} ({prettyBytes(bytes)}) - Finished {new Date(updated)}#
            </option>
          {/if}
        {/each}
      </select>

      <div class="text-center py-5">
        <div class="text-danger">
          Notice: Your instance will be placed in maintenance mode and then backed up before
          restoring the selected snapshot.
        </div>
        <button class="btn btn-light" on:click={() => startRestore()} disabled={!sourceBackupId}>
          <i class="bi bi-safe" /> Restore Now
        </button>
      </div>
    {/if}
  {/if}
</div>

<style lang="scss">
  select {
    max-width: 100%;
  }
</style>
New file (4 lines): store.ts shared by the instance pages
@ -0,0 +1,4 @@
import type { InstancesRecord } from '@pockethost/common'
import { writable } from 'svelte/store'

export const instance = writable<InstancesRecord>()
@ -1,10 +1,10 @@
  <script lang="ts">
+   import { browser } from '$app/environment'
    import AuthStateGuard from '$components/helpers/AuthStateGuard.svelte'
    import ProvisioningStatus from '$components/ProvisioningStatus.svelte'
    import RetroBoxContainer from '$components/RetroBoxContainer.svelte'
    import { PUBLIC_PB_DOMAIN } from '$src/env'
    import { client } from '$src/pocketbase'
-   import { browser } from '$app/environment'
    import { createCleanupManagerSync } from '$util/CleanupManager'
    import { humanVersion, type InstanceRecordById, type InstancesRecord } from '@pockethost/common'
    import { forEach, values } from '@s-libs/micro-dash'
@ -34,6 +34,7 @@
    apps = _apps
    _touch++
  }

  onMount(() => {
    const { getAllInstancesById, watchInstanceById } = client()
    getAllInstancesById()
@ -43,11 +44,10 @@
      forEach(apps, (app) => {
        const instanceId = app.id

-       const unsub = watchInstanceById(instanceId, (r) => {
+       watchInstanceById(instanceId, (r) => {
          const { action, record } = r
          _update({ ...apps, [record.id]: record })
-       })
-       cm.add(unsub)
+       }).then(cm.add)
      })
    })
    .catch((e) => {
@ -94,7 +94,7 @@
  <h2 class="mb-4 font-monospace">{app.subdomain}</h2>

  <div class="d-flex justify-content-around">
-   <a href={`/app/instances/${app.id}`} class="btn btn-light">
+   <a href={`/app/instances/${app.id}/details`} class="btn btn-light">
      <i class="bi bi-gear-fill" />
      <span>Details</span>
    </a>
patches/sqlite3+5.1.2.patch (new file, 150 lines)
@ -0,0 +1,150 @@
diff --git a/node_modules/sqlite3/lib/sqlite3.d.ts b/node_modules/sqlite3/lib/sqlite3.d.ts
index b27b0cf..a69dd7e 100644
--- a/node_modules/sqlite3/lib/sqlite3.d.ts
+++ b/node_modules/sqlite3/lib/sqlite3.d.ts
@@ -139,6 +139,145 @@ export class Database extends events.EventEmitter {
     wait(callback?: (param: null) => void): this;

     interrupt(): void;
+
+    backup(path:string, callback?: ()=>void): Backup
+    backup(filename:string, destDbName:string, sourceDbName:string, filenameIsDest:boolean, callback?: ()=>void): Backup
+}
+
+/**
+ * A class for managing an sqlite3_backup object. For consistency
+ * with other node-sqlite3 classes, it maintains an internal queue
+ * of calls.
+ *
+ * Intended usage from node:
+ *
+ *   var db = new sqlite3.Database('live.db');
+ *   var backup = db.backup('backup.db');
+ *   ...
+ *   // in event loop, move backup forward when we have time.
+ *   if (backup.idle) { backup.step(NPAGES); }
+ *   if (backup.completed) { ... success ... }
+ *   if (backup.failed) { ... sadness ... }
+ *   // do other work in event loop - fine to modify live.db
+ *   ...
+ *
+ * Here is how sqlite's backup api is exposed:
+ *
+ *   - `sqlite3_backup_init`: This is implemented as
+ *     `db.backup(filename, [callback])` or
+ *     `db.backup(filename, destDbName, sourceDbName, filenameIsDest, [callback])`.
+ *   - `sqlite3_backup_step`: `backup.step(pages, [callback])`.
+ *   - `sqlite3_backup_finish`: `backup.finish([callback])`.
+ *   - `sqlite3_backup_remaining`: `backup.remaining`.
+ *   - `sqlite3_backup_pagecount`: `backup.pageCount`.
+ *
+ * There are the following read-only properties:
+ *
+ *   - `backup.completed` is set to `true` when the backup succeeeds.
+ *   - `backup.failed` is set to `true` when the backup has a fatal error.
+ *   - `backup.idle` is set to `true` when no operation is currently in
+ *     progress or queued for the backup.
+ *   - `backup.remaining` is an integer with the remaining number of pages
+ *     after the last call to `backup.step` (-1 if `step` not yet called).
+ *   - `backup.pageCount` is an integer with the total number of pages
+ *     measured during the last call to `backup.step` (-1 if `step` not yet called).
+ *
+ * There is the following writable property:
+ *
+ *   - `backup.retryErrors`: an array of sqlite3 error codes that are treated
+ *     as non-fatal - meaning, if they occur, backup.failed is not set, and the
+ *     backup may continue. By default, this is `[sqlite3.BUSY, sqlite3.LOCKED]`.
+ *
+ * The `db.backup(filename, [callback])` shorthand is sufficient
+ * for making a backup of a database opened by node-sqlite3. If
+ * using attached or temporary databases, or moving data in the
+ * opposite direction, the more complete (but daunting)
+ * `db.backup(filename, destDbName, sourceDbName, filenameIsDest, [callback])`
+ * signature is provided.
+ *
+ * A backup will finish automatically when it succeeds or a fatal
+ * error occurs, meaning it is not necessary to call `db.finish()`.
+ * By default, SQLITE_LOCKED and SQLITE_BUSY errors are not
+ * treated as failures, and the backup will continue if they
+ * occur. The set of errors that are tolerated can be controlled
+ * by setting `backup.retryErrors`. To disable automatic
+ * finishing and stick strictly to sqlite's raw api, set
+ * `backup.retryErrors` to `[]`. In that case, it is necessary
+ * to call `backup.finish()`.
+ *
+ * In the same way as node-sqlite3 databases and statements,
+ * backup methods can be called safely without callbacks, due
+ * to an internal call queue. So for example this naive code
+ * will correctly back up a db, if there are no errors:
+ *
+ *   var backup = db.backup('backup.db');
+ *   backup.step(-1);
+ *   backup.finish();
+ */
+export class Backup extends events.EventEmitter {
+    /**
+     * `true` when the backup is idle and ready for `step()` to
+     * be called, `false` when busy.
+     */
+    readonly idle: boolean
+
+    /**
+     * `true` when the backup has completed, `false` otherwise.
+     */
+    readonly completed: boolean
+
+    /**
+     * `true` when the backup has failed, `false` otherwise.
+     */
+    readonly failed: boolean
+
+    /**
+     * The number of remaining pages after the last call to `step()`,
+     * or `-1` if `step()` has never been called.
+     */
+    readonly remaining: number
+
+    /**
+     * The total number of pages measured during the last call to `step()`,
+     * or `-1` if `step()` has never been called.
+     */
+    readonly pageCount: number
+
+    /**
+     * An array of sqlite3 error codes that are treated as non-fatal -
+     * meaning, if they occur, `Backup.failed` is not set, and the backup
+     * may continue. By default, this is `[sqlite3.BUSY, sqlite3.LOCKED]`.
+     */
+    retryErrors: number[]
+
+    /**
+     * Asynchronously finalize the backup (required).
+     *
+     * @param callback Called when the backup is finalized.
+     */
+    finish(callback?: ()=>void): void
+
+    /**
+     * Asynchronously perform an incremental segment of the backup.
+     *
+     * Example: `backup.step(5)`
+     *
+     * @param nPages Number of pages to process (5 recommended).
+     * @param callback Called when the step is completed.
+     */
+    step(nPages: number, callback?: ()=>void): void
 }

 export function verbose(): sqlite3;
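The patch only adds typings; the native backup API itself ships with node-sqlite3. A hedged example of that API in use, mirroring the daemon's execBackup helper earlier in this commit (file names are placeholders):

```ts
// Sketch only: copy live.db to backup.db in 5-page increments.
import { Database } from 'sqlite3'

const db = new Database('live.db')
const backup = db.backup('backup.db')

const pump = () => {
  if (backup.failed) throw new Error(`backup failed`)
  if (backup.completed) {
    backup.finish()
    console.log(`backup complete`)
    return
  }
  if (backup.idle) backup.step(5, () => setTimeout(pump, 10))
  else setTimeout(pump, 10)
}
pump()
```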
@ -93,8 +93,10 @@ git clone git@github.com:benallfree/pockethost.git
  cd pockethost/docker
  cp .env-template-dev .env.local # Edit as needed - defaults should work
  cd ..
+ docker build .
  docker-compose -f docker/build.yaml up --remove-orphans
  docker-compose -f docker/migrate.yaml up --remove-orphans
+ docker-compose -f docker/install.yaml up --remove-orphans
  docker-compose -f docker/dev.yaml up --remove-orphans
  open https://pockethost.test
  ```
@ -108,8 +110,10 @@ git clone git@github.com:benallfree/pockethost.git
  cd pockethost/docker
  cp .env-template-prod .env.local # Edit as needed - defaults should work
  cd ..
+ docker build .
  docker compose -f docker/build.yaml up --remove-orphans
  docker compose -f docker/migrate.yaml up --remove-orphans
+ docker-compose -f docker/install.yaml up --remove-orphans
  docker compose -f docker/prod.yaml up --remove-orphans
  ```
@ -130,8 +134,9 @@ open https://pockethost.io

  # Release History

- **next**
+ **0.5.0**

+ - [x] Create data backups
  - [x] Display version near PocketHost logo
  - [x] Account activation ux enhancements
  - [x] Password reset feature