Update scripts
This commit is contained in:
@@ -1,182 +1,177 @@
|
||||
import { Logger, Storage, Collection } from '@freearhey/core'
|
||||
import { ROOT_DIR, STREAMS_DIR, DATA_DIR } from '../../constants'
|
||||
import { PlaylistParser, StreamTester, CliTable, DataProcessor, DataLoader } from '../../core'
|
||||
import type { TestResult } from '../../core/streamTester'
|
||||
import { Stream } from '../../models'
|
||||
import { program, OptionValues } from 'commander'
|
||||
import { eachLimit } from 'async-es'
|
||||
import chalk from 'chalk'
|
||||
import os from 'node:os'
|
||||
import dns from 'node:dns'
|
||||
import type { DataLoaderData } from '../../types/dataLoader'
|
||||
import type { DataProcessorData } from '../../types/dataProcessor'
|
||||
|
||||
// Redraw cadence for the live-updating results table, in milliseconds.
const LIVE_UPDATE_INTERVAL = 5000
// Above this many streams the per-result redraw is disabled (see main()).
const LIVE_UPDATE_MAX_STREAMS = 100

// Running totals used for the final summary in onFinish().
let errors = 0
let warnings = 0
// Latest status string per stream, keyed by filepath + id + url (see runTest()).
const results: { [key: string]: string } = {}
// Handle for the periodic table redraw; cleared in onFinish().
let interval: string | number | NodeJS.Timeout | undefined
// All parsed streams; populated in main(), read by drawTable().
let streams = new Collection()
// Toggled off in main() when the stream count exceeds LIVE_UPDATE_MAX_STREAMS.
let isLiveUpdateEnabled = true

// CLI definition: optional playlist paths plus parallelism/proxy/timeout options.
program
  .argument('[filepath...]', 'Path to file to test')
  .option(
    '-p, --parallel <number>',
    'Batch size of streams to test concurrently',
    (value: string) => parseInt(value),
    os.cpus().length
  )
  .option('-x, --proxy <url>', 'Use the specified proxy')
  .option(
    '-t, --timeout <number>',
    'The number of milliseconds before the request will be aborted',
    (value: string) => parseInt(value),
    30000
  )
  .parse(process.argv)

const options: OptionValues = program.opts()

const logger = new Logger()
const tester = new StreamTester({ options })
|
||||
|
||||
// Entry point: checks connectivity, loads API data, parses the playlists,
// then tests every stream with bounded concurrency, redrawing the results
// table as it goes. Process exit is handled by onFinish().
async function main() {
  if (await isOffline()) {
    logger.error(chalk.red('Internet connection is required for the script to work'))
    return
  }

  logger.info('loading data from api...')
  const processor = new DataProcessor()
  const dataStorage = new Storage(DATA_DIR)
  const loader = new DataLoader({ storage: dataStorage })
  const data: DataLoaderData = await loader.load()
  const { channelsKeyById, feedsGroupedByChannelId, logosGroupedByStreamId }: DataProcessorData =
    processor.process(data)

  logger.info('loading streams...')
  const rootStorage = new Storage(ROOT_DIR)
  const parser = new PlaylistParser({
    storage: rootStorage,
    channelsKeyById,
    feedsGroupedByChannelId,
    logosGroupedByStreamId
  })
  // Explicit CLI paths win; otherwise test every playlist under STREAMS_DIR.
  const files = program.args.length ? program.args : await rootStorage.list(`${STREAMS_DIR}/*.m3u`)
  streams = await parser.parse(files)

  logger.info(`found ${streams.count()} streams`)
  // Redrawing after every single result is too costly for large batches.
  if (streams.count() > LIVE_UPDATE_MAX_STREAMS) isLiveUpdateEnabled = false

  logger.info('starting...')
  if (!isLiveUpdateEnabled) {
    // Fall back to a fixed-interval redraw instead of per-result updates.
    drawTable()
    interval = setInterval(() => {
      drawTable()
    }, LIVE_UPDATE_INTERVAL)
  }

  await eachLimit(
    streams.all(),
    options.parallel,
    async (stream: Stream) => {
      await runTest(stream)

      if (isLiveUpdateEnabled) {
        drawTable()
      }
    },
    onFinish
  )
}

main()
|
||||
|
||||
// Tests a single stream and records its colorized status in `results`.
async function runTest(stream: Stream) {
  // Key must match the one drawTable() builds for the same stream.
  const key = stream.filepath + stream.getId() + stream.url
  results[key] = chalk.white('LOADING...')

  const result: TestResult = await tester.test(stream)

  let status = ''
  // These codes count as hard errors; any other non-OK code is a warning.
  const errorStatusCodes = ['ENOTFOUND', 'HTTP_404_NOT_FOUND']
  if (result.status.ok) status = chalk.green('OK')
  else if (errorStatusCodes.includes(result.status.code)) {
    status = chalk.red(result.status.code)
    errors++
  } else {
    status = chalk.yellow(result.status.code)
    warnings++
  }

  results[key] = status
}
|
||||
|
||||
// Clears the terminal and renders one status table per playlist file.
function drawTable() {
  // \u001b[3J clears the scrollback, \u001b[1J clears the visible screen.
  process.stdout.write('\u001b[3J\u001b[1J')
  console.clear()

  const streamsGrouped = streams.groupBy((stream: Stream) => stream.filepath)
  for (const filepath of streamsGrouped.keys()) {
    // NOTE(review): this local shadows the module-level `streams` collection.
    const streams: Stream[] = streamsGrouped.get(filepath)

    const table = new CliTable({
      columns: [
        { name: '', alignment: 'center', minLen: 3, maxLen: 3 },
        { name: 'tvg-id', alignment: 'left', color: 'green', minLen: 25, maxLen: 25 },
        { name: 'url', alignment: 'left', color: 'green', minLen: 100, maxLen: 100 },
        { name: 'status', alignment: 'left', minLen: 25, maxLen: 25 }
      ]
    })
    streams.forEach((stream: Stream, index: number) => {
      // Same key formula as runTest(); streams without an entry have not started.
      const status = results[stream.filepath + stream.getId() + stream.url] || chalk.gray('PENDING')

      const row = {
        '': index,
        // Truncate long values with an ellipsis to fit the fixed column widths.
        'tvg-id': stream.getId().length > 25 ? stream.getId().slice(0, 22) + '...' : stream.getId(),
        url: stream.url.length > 100 ? stream.url.slice(0, 97) + '...' : stream.url,
        status
      }
      table.append(row)
    })

    process.stdout.write(`\n${chalk.underline(filepath)}\n`)

    process.stdout.write(table.toString())
  }
}
|
||||
|
||||
// Final callback for eachLimit: stops the redraw timer, prints the final
// table plus a summary, and exits non-zero when any hard error occurred.
function onFinish(error: any) {
  clearInterval(interval)

  if (error) {
    // The batch aborted early: report and fail the run.
    console.error(error)
    process.exit(1)
  }

  drawTable()

  if (errors > 0 || warnings > 0) {
    console.log(
      chalk.red(`\n${errors + warnings} problems (${errors} errors, ${warnings} warnings)`)
    )

    // Warnings alone do not fail the run; errors do.
    if (errors > 0) {
      process.exit(1)
    }
  }

  process.exit(0)
}
|
||||
|
||||
async function isOffline() {
|
||||
return new Promise((resolve, reject) => {
|
||||
dns.lookup('info.cern.ch', err => {
|
||||
if (err) resolve(true)
|
||||
reject(false)
|
||||
})
|
||||
}).catch(() => {})
|
||||
}
|
||||
import { PlaylistParser, StreamTester, CliTable } from '../../core'
|
||||
import type { TestResult } from '../../core/streamTester'
|
||||
import { ROOT_DIR, STREAMS_DIR } from '../../constants'
|
||||
import { Logger, Collection } from '@freearhey/core'
|
||||
import { program, OptionValues } from 'commander'
|
||||
import { Storage } from '@freearhey/storage-js'
|
||||
import { Stream } from '../../models'
|
||||
import { loadData } from '../../api'
|
||||
import { eachLimit } from 'async'
|
||||
import dns from 'node:dns'
|
||||
import chalk from 'chalk'
|
||||
import os from 'node:os'
|
||||
import { truncate } from '../../utils'
|
||||
|
||||
// Redraw cadence for the live-updating results table, in milliseconds.
const LIVE_UPDATE_INTERVAL = 5000
// Above this many streams the per-result redraw is disabled (see main()).
const LIVE_UPDATE_MAX_STREAMS = 100

// Running totals used for the final summary in onFinish().
let errors = 0
let warnings = 0
// Latest status string per stream, keyed by stream.getUniqKey() (see runTest()).
const results: { [key: string]: string } = {}
// Handle for the periodic table redraw; cleared in onFinish().
let interval: string | number | NodeJS.Timeout | undefined
// All parsed streams; populated in main(), read by drawTable().
let streams = new Collection<Stream>()
// Toggled off in main() when the stream count exceeds LIVE_UPDATE_MAX_STREAMS.
let isLiveUpdateEnabled = true

// CLI definition: optional playlist paths plus parallelism/proxy/timeout options.
program
  .argument('[filepath...]', 'Path to file to test')
  .option(
    '-p, --parallel <number>',
    'Batch size of streams to test concurrently',
    (value: string) => parseInt(value),
    os.cpus().length
  )
  .option('-x, --proxy <url>', 'Use the specified proxy')
  .option(
    '-t, --timeout <number>',
    'The number of milliseconds before the request will be aborted',
    (value: string) => parseInt(value),
    30000
  )
  .parse(process.argv)

const options: OptionValues = program.opts()

const logger = new Logger()
const tester = new StreamTester({ options })
|
||||
|
||||
// Entry point: checks connectivity, loads API data, parses the playlists,
// then tests every stream with bounded concurrency. Process exit is
// handled by the onFinish() callback.
async function main() {
  if (await isOffline()) {
    logger.error(chalk.red('Internet connection is required for the script to work'))
    return
  }

  logger.info('loading data from api...')
  await loadData()

  logger.info('loading streams...')
  const rootStorage = new Storage(ROOT_DIR)
  const parser = new PlaylistParser({
    storage: rootStorage
  })
  // Explicit CLI paths win; otherwise test every playlist under STREAMS_DIR.
  const files = program.args.length ? program.args : await rootStorage.list(`${STREAMS_DIR}/*.m3u`)
  streams = await parser.parse(files)

  logger.info(`found ${streams.count()} streams`)
  // Redrawing after every single result is too costly for large batches.
  if (streams.count() > LIVE_UPDATE_MAX_STREAMS) isLiveUpdateEnabled = false

  logger.info('starting...')
  if (!isLiveUpdateEnabled) {
    // Fall back to a fixed-interval redraw instead of per-result updates.
    drawTable()
    interval = setInterval(() => {
      drawTable()
    }, LIVE_UPDATE_INTERVAL)
  }

  // Deliberately not awaited: completion and errors are delivered to the
  // onFinish final callback, which terminates the process.
  eachLimit(
    streams.all(),
    options.parallel,
    async (stream: Stream) => {
      await runTest(stream)

      if (isLiveUpdateEnabled) {
        drawTable()
      }
    },
    onFinish
  )
}

main()
|
||||
|
||||
async function runTest(stream: Stream) {
|
||||
const key = stream.getUniqKey()
|
||||
results[key] = chalk.white('LOADING...')
|
||||
|
||||
const result: TestResult = await tester.test(stream)
|
||||
|
||||
let status = ''
|
||||
const errorStatusCodes = ['ENOTFOUND', 'HTTP_404_NOT_FOUND']
|
||||
if (result.status.ok) status = chalk.green('OK')
|
||||
else if (errorStatusCodes.includes(result.status.code)) {
|
||||
status = chalk.red(result.status.code)
|
||||
errors++
|
||||
} else {
|
||||
status = chalk.yellow(result.status.code)
|
||||
warnings++
|
||||
}
|
||||
|
||||
results[key] = status
|
||||
}
|
||||
|
||||
function drawTable() {
|
||||
process.stdout.write('\u001b[3J\u001b[1J')
|
||||
console.clear()
|
||||
|
||||
const streamsGrouped = streams.groupBy((stream: Stream) => stream.filepath)
|
||||
for (const filepath of streamsGrouped.keys()) {
|
||||
const streams: Stream[] = streamsGrouped.get(filepath) || []
|
||||
|
||||
const table = new CliTable({
|
||||
columns: [
|
||||
{ name: '', alignment: 'center', minLen: 3, maxLen: 3 },
|
||||
{ name: 'tvg-id', alignment: 'left', color: 'green', minLen: 25, maxLen: 25 },
|
||||
{ name: 'url', alignment: 'left', color: 'green', minLen: 100, maxLen: 100 },
|
||||
{ name: 'status', alignment: 'left', minLen: 25, maxLen: 25 }
|
||||
]
|
||||
})
|
||||
streams.forEach((stream: Stream, index: number) => {
|
||||
const key = stream.getUniqKey()
|
||||
const status = results[key] || chalk.gray('PENDING')
|
||||
const tvgId = stream.getTvgId()
|
||||
|
||||
const row = {
|
||||
'': index,
|
||||
'tvg-id': truncate(tvgId, 25),
|
||||
url: truncate(stream.url, 100),
|
||||
status
|
||||
}
|
||||
table.append(row)
|
||||
})
|
||||
|
||||
process.stdout.write(`\n${chalk.underline(filepath)}\n`)
|
||||
|
||||
process.stdout.write(table.toString())
|
||||
}
|
||||
}
|
||||
|
||||
function onFinish(error: Error) {
|
||||
clearInterval(interval)
|
||||
|
||||
if (error) {
|
||||
console.error(error)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
drawTable()
|
||||
|
||||
if (errors > 0 || warnings > 0) {
|
||||
console.log(
|
||||
chalk.red(`\n${errors + warnings} problems (${errors} errors, ${warnings} warnings)`)
|
||||
)
|
||||
|
||||
if (errors > 0) {
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
async function isOffline() {
|
||||
return new Promise((resolve, reject) => {
|
||||
dns.lookup('info.cern.ch', err => {
|
||||
if (err) resolve(true)
|
||||
reject(false)
|
||||
})
|
||||
}).catch(() => {})
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user