Analytics Overhaul and InfluxDB Removal (#3243)

- Make the Analytics table in the database the primary store for statistics for all stations, removing the InfluxDB dependency entirely
- Expand the Analytics table to also track unique listeners per hour and per day
- Properly clean up the Listeners table according to each installation's history retention settings
- Implement a cute new animated "waiting for services" startup message, replacing the previous wait messages that looked more like errors
Buster "Silver Eagle" Neece 2020-10-07 18:50:30 -05:00 committed by GitHub
parent b660f011c7
commit a6ec36b21c
42 changed files with 680 additions and 740 deletions
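For a sense of what replaces the old InfluxDB time-series queries, here is a minimal illustrative sketch of reading the reshaped analytics table directly with PDO. The schema follows the Version20201006044905 migration further down; the host, database name and credentials are placeholders, not values from this commit.

<?php
// Hypothetical standalone query against the new analytics table.
$pdo = new PDO('mysql:host=localhost;dbname=azuracast;charset=utf8mb4', 'azuracast', 'secret');
$stmt = $pdo->prepare(
    'SELECT moment, number_min, number_max, number_avg, number_unique
     FROM analytics
     WHERE station_id = :station AND type = :type AND moment >= :threshold
     ORDER BY moment ASC'
);
$stmt->execute([
    'station' => 1,      // rows with a NULL station_id hold the all-stations totals
    'type' => 'day',     // Analytics::INTERVAL_DAILY; use 'hour' for the new hourly rows
    'threshold' => (new DateTimeImmutable('-30 days'))->format('Y-m-d H:i:s'),
]);
foreach ($stmt as $row) {
    printf(
        "%s: avg %s listeners (%s unique)\n",
        $row['moment'],
        $row['number_avg'],
        $row['number_unique'] ?? 'n/a'
    );
}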

View File

@ -3,7 +3,7 @@ charset = utf-8
end_of_line = lf
indent_size = 4
indent_style = space
insert_final_newline = false
insert_final_newline = true
max_line_length = 120
tab_width = 4
ij_continuation_indent_size = 4

View File

@ -66,9 +66,5 @@ ENV APPLICATION_ENV="production" \
ADDITIONAL_MEDIA_SYNC_WORKER_COUNT=0
# Entrypoint and default command
ENTRYPOINT ["dockerize",\
"-wait","tcp://mariadb:3306",\
"-wait","tcp://influxdb:8086",\
"-wait","tcp://redis:6379",\
"-timeout","90s"]
ENTRYPOINT ["/usr/local/bin/uptime_wait"]
CMD ["/usr/local/bin/my_init"]

View File

@ -68,7 +68,6 @@ For x86/x64 installations, [SHOUTcast 2 DNAS](http://wiki.shoutcast.com/wiki/SHO
* **[NGINX](https://www.nginx.com)** for serving web pages and the radio proxy
* **[MariaDB](https://mariadb.org/)** as the primary database
* **[PHP 7.2](https://secure.php.net/)** powering the web application
* **[InfluxDB](https://www.influxdata.com/)** for time-series based statistics
* **[Redis](https://redis.io/)** for sessions, database and general caching
## AzuraCast API

bin/uptime_wait (new file, 139 lines)
View File

@ -0,0 +1,139 @@
#!/usr/bin/env php
<?php
error_reporting(E_ALL & ~E_NOTICE & ~E_STRICT);
ini_set('display_errors', 1);
class Spinner
{
protected array $frames = [];
protected int $length;
protected int $current = 0;
public function __construct(array $frames)
{
$this->frames = $frames;
$this->length = count($this->frames);
}
public function tick(string $message): void
{
$next = $this->next();
echo chr(27) . '[0G';
echo sprintf('%s %s', $this->frames[$next], $message);
}
private function next(): int
{
$prev = $this->current;
$this->current = $prev + 1;
if ($this->current >= $this->length) {
$this->current = 0;
}
return $prev;
}
}
class UptimeWait
{
protected Spinner $spinner;
protected int $timeout = 180;
protected int $retryInterval = 1;
protected bool $debugMode = false;
public function __construct()
{
$this->spinner = new Spinner([
'🖥️🎶-🎵-📻',
'🖥️-🎶-🎵📻',
'🖥️🎵-🎶-📻',
'🖥️-🎵-🎶📻',
]);
$_ENV = getenv();
$applicationEnv = $_ENV['LOG_LEVEL'] ?? 'notice';
$this->debugMode = ('debug' === $applicationEnv);
}
public function run(): void
{
$this->println('Starting up AzuraCast services...');
$elapsed = 0;
while ($elapsed <= $this->timeout) {
if ($this->checkDatabase() && $this->checkRedis()) {
$this->println('Services started up and ready!');
die(0);
}
sleep($this->retryInterval);
$elapsed += $this->retryInterval;
$this->spinner->tick('Waiting...');
}
$this->println('Timed out waiting for services to start.');
die(1);
}
protected function checkDatabase(): bool
{
try {
$dbOptions = [
'host' => $_ENV['MYSQL_HOST'] ?? 'mariadb',
'port' => $_ENV['MYSQL_PORT'] ?? 3306,
'dbname' => $_ENV['MYSQL_DATABASE'],
'user' => $_ENV['MYSQL_USER'],
'password' => $_ENV['MYSQL_PASSWORD'],
];
$dbh = new PDO('mysql:host=' . $dbOptions['host'] . ';dbname=' . $dbOptions['dbname'], $dbOptions['user'],
$dbOptions['password']);
$dbh->exec('SELECT 1');
return true;
} catch (Throwable $e) {
if ($this->debugMode) {
$this->println($e->getMessage());
}
return false;
}
}
protected function checkRedis(): bool
{
try {
$redis = new Redis();
$redis->connect('redis', 6379, 15);
$redis->select(1);
$redis->ping();
return true;
} catch (Throwable $e) {
if ($this->debugMode) {
$this->println($e->getMessage());
}
return false;
}
}
protected function println(string $line): void
{
echo $line . "\n";
}
}
$uptimeWait = new UptimeWait;
$uptimeWait->run();
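The only non-obvious trick in the Spinner class above is chr(27) . '[0G', the ANSI escape that moves the cursor back to column 0 so each tick() overwrites the previous frame on the same line. A tiny standalone demo of that redraw technique (frames and message are made up, not taken from this commit):

<?php
// Redraw a one-line spinner in place using the ESC[0G "cursor to column 0" escape.
$frames = ['-', '\\', '|', '/'];
for ($i = 0; $i < 20; $i++) {
    echo chr(27) . '[0G' . $frames[$i % count($frames)] . ' Waiting...';
    usleep(100000); // 0.1 s between frames
}
echo PHP_EOL;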

View File

@ -35,7 +35,6 @@
"guzzlehttp/guzzle": "^7.0",
"guzzlehttp/oauth-subscriber": "^0.4.0",
"http-interop/http-factory-guzzle": "^1.0",
"influxdb/influxdb-php": "1.15.1",
"james-heinrich/getid3": "dev-master",
"jhofm/flysystem-iterator": "^2.1",
"laminas/laminas-config": "^3.3",

composer.lock (generated; 63 lines changed)
View File

@ -4,7 +4,7 @@
"Read more about it at https://getcomposer.org/doc/01-basic-usage.md#installing-dependencies",
"This file is @generated automatically"
],
"content-hash": "9a9cb6e8e6a42a17e3b5bfbaffee22cb",
"content-hash": "0d1cc8d8f2f751d6ee6c3515497b56e6",
"packages": [
{
"name": "aws/aws-sdk-php",
@ -2672,67 +2672,6 @@
],
"time": "2018-07-31T19:32:56+00:00"
},
{
"name": "influxdb/influxdb-php",
"version": "1.15.1",
"source": {
"type": "git",
"url": "https://github.com/influxdata/influxdb-php.git",
"reference": "447acb600969f9510c9f1900a76d442fc3537b0e"
},
"dist": {
"type": "zip",
"url": "https://api.github.com/repos/influxdata/influxdb-php/zipball/447acb600969f9510c9f1900a76d442fc3537b0e",
"reference": "447acb600969f9510c9f1900a76d442fc3537b0e",
"shasum": ""
},
"require": {
"guzzlehttp/guzzle": "^6.0|^7.0",
"php": "^5.5 || ^7.0"
},
"require-dev": {
"phpunit/phpunit": "^5.7"
},
"suggest": {
"ext-curl": "Curl extension, needed for Curl driver",
"stefanotorresi/influxdb-php-async": "An asyncronous client for InfluxDB, implemented via ReactPHP."
},
"type": "library",
"autoload": {
"psr-4": {
"InfluxDB\\": "src/InfluxDB"
}
},
"notification-url": "https://packagist.org/downloads/",
"license": [
"MIT"
],
"authors": [
{
"name": "Stephen Hoogendijk",
"email": "stephen@tca0.nl"
},
{
"name": "Daniel Martinez",
"email": "danimartcas@hotmail.com"
},
{
"name": "Gianluca Arbezzano",
"email": "gianarb92@gmail.com"
}
],
"description": "InfluxDB client library for PHP",
"keywords": [
"client",
"influxdata",
"influxdb",
"influxdb class",
"influxdb client",
"influxdb library",
"time series"
],
"time": "2020-09-18T13:24:03+00:00"
},
{
"name": "james-heinrich/getid3",
"version": "dev-master",

View File

@ -81,11 +81,6 @@ return function (Application $console) {
Command\MigrateConfigCommand::class
)->setDescription(__('Migrate existing configuration to new INI format if any exists.'));
$console->command(
'azuracast:setup:influx',
Command\Influx\SetupCommand::class
)->setDescription(__('Initial setup of InfluxDB.'));
$console->command(
'azuracast:setup:fixtures',
Command\SetupFixturesCommand::class
@ -127,16 +122,6 @@ return function (Application $console) {
Command\GenerateApiDocsCommand::class
)->setDescription('Trigger regeneration of AzuraCast API documentation.');
$console->command(
'azuracast:internal:uptime-wait',
Command\UptimeWaitCommand::class
)->setDescription('Wait until core services are online and accepting connections before continuing.');
$console->command(
'influxdb:query query',
Command\Influx\QueryCommand::class
)->setDescription('Execute a query on the InfluxDB database.');
// User-side tools
$console->command(
'azuracast:account:list',

View File

@ -118,6 +118,10 @@ return [
$config->addCustomNumericFunction('RAND', DoctrineExtensions\Query\Mysql\Rand::class);
if (!Doctrine\DBAL\Types\Type::hasType('carbon_immutable')) {
Doctrine\DBAL\Types\Type::addType('carbon_immutable', Carbon\Doctrine\CarbonImmutableType::class);
}
$eventManager = new Doctrine\Common\EventManager;
$eventManager->addEventSubscriber($eventRequiresRestart);
$eventManager->addEventSubscriber($eventAuditLog);
@ -345,17 +349,6 @@ return [
]);
},
// InfluxDB
InfluxDB\Database::class => function (Settings $settings) {
$opts = [
'host' => $settings->isDocker() ? 'influxdb' : 'localhost',
'port' => 8086,
];
$influx = new InfluxDB\Client($opts['host'], $opts['port']);
return $influx->selectDB('stations');
},
// Supervisor manager
Supervisor\Supervisor::class => function (Settings $settings) {
$client = new fXmlRpc\Client(

View File

@ -21,10 +21,6 @@ services:
ports:
- "127.0.0.1:3306:3306"
influxdb:
build:
context: ../docker-azuracast-influxdb
redis:
build:
context: ../docker-azuracast-redis

View File

@ -51,13 +51,12 @@ services:
- '${AZURACAST_SFTP_PORT:-2022}:2022'
depends_on:
- mariadb
- influxdb
- stations
- redis
env_file: azuracast.env
environment:
LANG: ${LANG:-en_US.UTF-8}
AZURACAST_DC_REVISION: 10
AZURACAST_DC_REVISION: 11
AZURACAST_VERSION: ${AZURACAST_VERSION:-latest}
AZURACAST_SFTP_PORT: ${AZURACAST_SFTP_PORT:-2022}
VIRTUAL_HOST: ${LETSENCRYPT_HOST:-azuracast.local}
@ -95,15 +94,6 @@ services:
restart: always
logging: *default-logging
influxdb:
image: "azuracast/azuracast_influxdb:${AZURACAST_VERSION:-latest}"
volumes:
- influx_data:/var/lib/influxdb
networks:
- backend
restart: always
logging: *default-logging
redis:
image: "azuracast/azuracast_redis:${AZURACAST_VERSION:-latest}"
sysctls:
@ -292,7 +282,6 @@ networks:
volumes:
nginx_proxy_vhosts: { }
db_data: { }
influx_data: { }
letsencrypt: { }
letsencrypt_html: { }
shoutcast2_install: { }

View File

@ -485,14 +485,12 @@ restore-legacy() {
if [ -f "$BACKUP_PATH" ]; then
docker-compose down
docker volume rm azuracast_db_data azuracast_influx_data azuracast_station_data
docker volume rm azuracast_db_data azuracast_station_data
docker volume create azuracast_db_data
docker volume create azuracast_influx_data
docker volume create azuracast_station_data
docker run --rm -v "$BACKUP_DIR:/backup" \
-v azuracast_db_data:/azuracast/db \
-v azuracast_influx_data:/azuracast/influx \
-v azuracast_station_data:/azuracast/stations \
busybox tar zxvf "/backup/$BACKUP_FILENAME"

View File

@ -134,6 +134,8 @@ class AppFactory
ini_set('session.cookie_lifetime', '86400');
ini_set('session.use_strict_mode', '1');
date_default_timezone_set('UTC');
session_cache_limiter('');
}

View File

@ -7,7 +7,6 @@ use App\Entity;
use App\Sync\Task\Backup;
use App\Utilities;
use Doctrine\ORM\EntityManagerInterface;
use InfluxDB\Database;
use Symfony\Component\Console\Style\SymfonyStyle;
use const PATHINFO_EXTENSION;
@ -18,7 +17,6 @@ class BackupCommand extends CommandAbstract
public function __invoke(
SymfonyStyle $io,
EntityManagerInterface $em,
Database $influxdb,
?string $path = '',
bool $excludeMedia = false
) {
@ -46,12 +44,6 @@ class BackupCommand extends CommandAbstract
return 1;
}
$tmp_dir_influxdb = '/tmp/azuracast_backup_influxdb';
if (!mkdir($tmp_dir_influxdb) && !is_dir($tmp_dir_influxdb)) {
$io->error(__('Directory "%s" was not created', $tmp_dir_influxdb));
return 1;
}
$io->newLine();
// Back up MariaDB
@ -78,25 +70,6 @@ class BackupCommand extends CommandAbstract
$files_to_backup[] = $path_db_dump;
$io->newLine();
// Back up InfluxDB
$io->section(__('Backing up InfluxDB...'));
$influxdb_client = $influxdb->getClient();
$this->passThruProcess($io, [
'influxd',
'backup',
'-database',
'stations',
'-portable',
'-host',
$influxdb_client->getHost() . ':8088',
$tmp_dir_influxdb,
], $tmp_dir_influxdb);
$files_to_backup[] = $tmp_dir_influxdb;
$io->newLine();
// Include station media if specified.
if ($includeMedia) {
$stations = $em->createQuery(/** @lang DQL */ 'SELECT s FROM App\Entity\Station s')
@ -160,7 +133,6 @@ class BackupCommand extends CommandAbstract
$io->section(__('Cleaning up temporary files...'));
Utilities::rmdirRecursive($tmp_dir_mariadb);
Utilities::rmdirRecursive($tmp_dir_influxdb);
$io->newLine();

View File

@ -6,7 +6,6 @@ use App\Console\Command\Traits;
use App\Sync\Task\Backup;
use App\Utilities;
use Doctrine\ORM\EntityManagerInterface;
use InfluxDB\Database;
use Symfony\Component\Console\Output\OutputInterface;
use Symfony\Component\Console\Style\SymfonyStyle;
use const PATHINFO_EXTENSION;
@ -19,7 +18,6 @@ class RestoreCommand extends CommandAbstract
SymfonyStyle $io,
OutputInterface $output,
EntityManagerInterface $em,
Database $influxdb,
string $path
) {
$start_time = microtime(true);
@ -92,28 +90,6 @@ class RestoreCommand extends CommandAbstract
Utilities::rmdirRecursive($tmp_dir_mariadb);
$io->newLine();
// Handle InfluxDB import
$tmp_dir_influxdb = '/tmp/azuracast_backup_influxdb';
if (!is_dir($tmp_dir_influxdb)) {
$io->getErrorStyle()->error('InfluxDB backup file not found!');
return 1;
}
$influxdb_client = $influxdb->getClient();
$this->passThruProcess($io, [
'influxd',
'restore',
'-portable',
'-host',
$influxdb_client->getHost() . ':8088',
$tmp_dir_influxdb,
], $tmp_dir_influxdb);
Utilities::rmdirRecursive($tmp_dir_influxdb);
$io->newLine();
// Update from current version to latest.
$io->section('Running standard updates...');

View File

@ -11,7 +11,7 @@ class ClearCacheCommand extends CommandAbstract
// Flush all Redis entries.
$redis->flushAll();
$io->writeln('Local cache flushed.');
$io->success('Local cache flushed.');
return 0;
}
}

View File

@ -1,21 +0,0 @@
<?php
namespace App\Console\Command\Influx;
use App\Console\Command\CommandAbstract;
use InfluxDB\Database;
use Symfony\Component\Console\Style\SymfonyStyle;
class QueryCommand extends CommandAbstract
{
public function __invoke(
SymfonyStyle $io,
Database $influxdb,
$query
) {
$output = $influxdb->query($query);
$parsed = json_decode($output->getRaw(), true, 512, JSON_THROW_ON_ERROR);
$io->writeln(json_encode($parsed, JSON_THROW_ON_ERROR | JSON_PRETTY_PRINT));
return 0;
}
}

View File

@ -1,97 +0,0 @@
<?php
namespace App\Console\Command\Influx;
use App\Console\Command\CommandAbstract;
use App\Settings;
use InfluxDB\Database;
use Symfony\Component\Console\Style\SymfonyStyle;
class SetupCommand extends CommandAbstract
{
public function __invoke(
SymfonyStyle $io,
Database $influxdb,
Settings $settings
) {
$db_name = $influxdb->getName();
// Create the database (if it doesn't exist)
$influxdb->create();
$io->writeln(__('Database created.'));
// Establish retention policies
$retention_policies = [
['name' => '15s', 'duration' => '5d', 'default' => true],
['name' => '1h', 'duration' => '2w', 'default' => false],
['name' => '1d', 'duration' => 'INF', 'default' => false],
];
$all_rps_raw = $influxdb->listRetentionPolicies();
$existing_rps = [];
foreach ($all_rps_raw as $rp) {
$existing_rps[$rp['name']] = $rp;
}
foreach ($retention_policies as $rp) {
$rp_obj = new Database\RetentionPolicy($rp['name'], $rp['duration'], 1, $rp['default']);
if (isset($existing_rps[$rp['name']])) {
$influxdb->alterRetentionPolicy($rp_obj);
unset($existing_rps[$rp['name']]);
} else {
$influxdb->createRetentionPolicy($rp_obj);
}
}
// Remove any remaining retention policies that aren't defined here
if (!empty($existing_rps)) {
foreach ($existing_rps as $rp_name => $rp_info) {
$influxdb->query(sprintf('DROP RETENTION POLICY %s ON %s', $rp_name, $db_name));
}
}
$io->writeln(__('Retention policies updated.'));
// Drop existing continuous queries.
$cqs = $influxdb->query('SHOW CONTINUOUS QUERIES');
foreach ((array)$cqs->getPoints() as $existing_cq) {
$influxdb->query(sprintf('DROP CONTINUOUS QUERY %s ON %s', $existing_cq['name'], $db_name));
}
// Create continuous queries
$downsample_retentions = ['1h', '1d'];
foreach ($downsample_retentions as $dr) {
$cq_name = 'cq_' . $dr;
$cq_fields = 'min(value) AS min, mean(value) AS value, max(value) AS max';
$influxdb->query(sprintf('CREATE CONTINUOUS QUERY %s ON %s BEGIN SELECT %s INTO "%s".:MEASUREMENT FROM /.*/ GROUP BY time(%s) END',
$cq_name, $db_name, $cq_fields, $dr, $dr));
}
$io->writeln(__('Continuous queries created.'));
// Print debug information
if (!$settings->isProduction()) {
$rps_raw = $influxdb->query('SHOW RETENTION POLICIES');
$rps = (array)$rps_raw->getPoints();
$io->writeln(print_r($rps, true));
$cqs_raw = $influxdb->query('SHOW CONTINUOUS QUERIES');
$cqs = [];
foreach ((array)$cqs_raw->getPoints() as $cq) {
$cqs[$cq['name']] = $cq['query'];
}
$io->writeln(print_r($cqs, true));
}
$io->writeln(__('InfluxDB databases created.'));
return 0;
}
}

View File

@ -30,8 +30,6 @@ class SetupCommand extends CommandAbstract
__('Installation Method: %s', $settings->isDocker() ? 'Docker' : 'Ansible'),
]);
$this->runCommand($output, 'azuracast:internal:uptime-wait');
if ($update) {
$io->note(__('Running in update mode.'));
@ -42,10 +40,6 @@ class SetupCommand extends CommandAbstract
}
}
$io->section(__('Setting Up InfluxDB'));
$this->runCommand($output, 'azuracast:setup:influx');
/** @var EntityManagerInterface $em */
$em = $di->get(EntityManagerInterface::class);
$conn = $em->getConnection();

View File

@ -1,15 +1,11 @@
<?php
namespace App\Console\Command;
use App\Entity\Station;
use App\Settings;
use Carbon\CarbonImmutable;
use Doctrine\Common\DataFixtures\Executor\ORMExecutor;
use Doctrine\Common\DataFixtures\Loader;
use Doctrine\Common\DataFixtures\Purger\ORMPurger;
use Doctrine\ORM\EntityManagerInterface;
use InfluxDB\Database;
use InfluxDB\Point;
use Psr\Container\ContainerInterface;
use RecursiveDirectoryIterator;
use RecursiveIteratorIterator;
@ -21,7 +17,6 @@ class SetupFixturesCommand extends CommandAbstract
SymfonyStyle $io,
EntityManagerInterface $em,
ContainerInterface $di,
Database $influx,
Settings $settings
) {
$loader = new Loader();
@ -50,44 +45,7 @@ class SetupFixturesCommand extends CommandAbstract
$executor = new ORMExecutor($em, $purger);
$executor->execute($loader->getFixtures());
// Preload sample data.
$stations = $em->getRepository(Station::class)->findAll();
$midnight_utc = CarbonImmutable::now('UTC')->setTime(0, 0);
$influx_points = [];
for ($i = 1; $i <= 14; $i++) {
$day = $midnight_utc->subDays($i)->getTimestamp();
$day_listeners = 0;
foreach ($stations as $station) {
/** @var Station $station */
$station_listeners = random_int(1, 20);
$day_listeners += $station_listeners;
$influx_points[] = new Point(
'station.' . $station->getId() . '.listeners',
(float)$station_listeners,
[],
['station' => $station->getId()],
$day
);
}
$influx_points[] = new Point(
'station.all.listeners',
(float)$day_listeners,
[],
['station' => 0],
$day
);
}
$influx->writePoints($influx_points, Database::PRECISION_SECONDS, '1d');
$io->writeln(__('Fixtures loaded.'));
$io->success(__('Fixtures loaded.'));
return 0;
}

View File

@ -2,18 +2,14 @@
namespace App\Console\Command;
use App;
use App\Service\UptimeWait;
use App\Sync\Runner;
class SyncCommand extends CommandAbstract
{
public function __invoke(
Runner $sync,
UptimeWait $uptimeWait,
string $task = 'nowplaying'
) {
$uptimeWait->waitForAll();
switch ($task) {
case 'long':
$sync->syncLong();

View File

@ -1,34 +0,0 @@
<?php
namespace App\Console\Command;
use App;
use Exception;
use Symfony\Component\Console\Style\SymfonyStyle;
class UptimeWaitCommand extends CommandAbstract
{
public function __invoke(
SymfonyStyle $io,
App\Service\UptimeWait $uptimeWait
) {
$io->writeln('Waiting for dependent services to go online...');
$io->progressStart(3);
try {
$uptimeWait->waitForDatabase();
$io->progressAdvance();
$uptimeWait->waitForInflux();
$io->progressAdvance();
$uptimeWait->waitForRedis();
$io->progressAdvance();
} catch (Exception $e) {
$io->error('Error encountered: ' . $e->getMessage() . ' (' . $e->getFile() . ' L' . $e->getLine() . ')');
return 1;
}
$io->progressFinish();
return 0;
}
}

View File

@ -9,8 +9,8 @@ use App\Http\Response;
use App\Http\Router;
use App\Http\ServerRequest;
use App\Radio\Adapters;
use Carbon\CarbonImmutable;
use Doctrine\ORM\EntityManagerInterface;
use InfluxDB\Database;
use Psr\Http\Message\ResponseInterface;
use Psr\SimpleCache\CacheInterface;
use stdClass;
@ -25,8 +25,6 @@ class DashboardController
protected CacheInterface $cache;
protected Database $influx;
protected Router $router;
protected Adapters $adapter_manager;
@ -38,7 +36,6 @@ class DashboardController
Entity\Repository\SettingsRepository $settingsRepo,
Acl $acl,
CacheInterface $cache,
Database $influx,
Adapters $adapter_manager,
EventDispatcher $dispatcher
) {
@ -46,7 +43,6 @@ class DashboardController
$this->settingsRepo = $settingsRepo;
$this->acl = $acl;
$this->cache = $cache;
$this->influx = $influx;
$this->adapter_manager = $adapter_manager;
$this->dispatcher = $dispatcher;
}
@ -155,32 +151,30 @@ class DashboardController
// Statistics by day.
$station_averages = [];
// Query InfluxDB database.
$resultset = $this->influx->query('SELECT * FROM "1d"./.*/ WHERE time > now() - 180d', [
'epoch' => 'ms',
]);
$threshold = CarbonImmutable::parse('-180 days');
$results_raw = $resultset->getSeries();
$results = [];
foreach ($results_raw as $serie) {
$points = [];
foreach ($serie['values'] as $point) {
$points[] = array_combine($serie['columns'], $point);
}
$stats = $this->em->createQuery(/** @lang DQL */ 'SELECT a.station_id, a.moment, a.number_avg, a.number_unique
FROM App\Entity\Analytics a
WHERE (a.station_id IS NULL OR a.station_id IN (:stations))
AND a.type = :type
AND a.moment >= :threshold')
->setParameter('stations', $view_stations)
->setParameter('type', Entity\Analytics::INTERVAL_DAILY)
->setParameter('threshold', $threshold)
->getArrayResult();
$results[$serie['name']] = $points;
}
foreach ($stats as $row) {
$station_id = $row['station_id'] ?? 'all';
foreach ($results as $stat_series => $stat_rows) {
$series_split = explode('.', $stat_series);
$station_id = $series_split[1];
/** @var CarbonImmutable $moment */
$moment = $row['moment'];
foreach ($stat_rows as $stat_row) {
$station_averages[$station_id][$stat_row['time']] = [
$stat_row['time'],
round($stat_row['value'], 2),
];
}
$moment = $moment->getTimestamp() * 1000;
$station_averages[$station_id][$moment] = [
$moment,
round($row['number_avg'], 2),
];
}
$metric_stations = [];

View File

@ -6,7 +6,6 @@ use App\Http\Response;
use App\Http\ServerRequest;
use Carbon\CarbonImmutable;
use Doctrine\ORM\EntityManagerInterface;
use InfluxDB\Database;
use Psr\Http\Message\ResponseInterface;
use stdClass;
use function array_reverse;
@ -18,16 +17,16 @@ class OverviewController
protected Entity\Repository\SettingsRepository $settingsRepo;
protected Database $influx;
protected Entity\Repository\AnalyticsRepository $analyticsRepo;
public function __construct(
EntityManagerInterface $em,
Entity\Repository\SettingsRepository $settingsRepo,
Database $influx
Entity\Repository\AnalyticsRepository $analyticsRepo
) {
$this->em = $em;
$this->settingsRepo = $settingsRepo;
$this->influx = $influx;
$this->analyticsRepo = $analyticsRepo;
}
public function __invoke(ServerRequest $request, Response $response): ResponseInterface
@ -45,13 +44,11 @@ class OverviewController
}
/* Statistics */
$statisticsThreshold = CarbonImmutable::parse('-1 month', $station_tz)->getTimestamp();
$statisticsThreshold = CarbonImmutable::parse('-1 month', $station_tz);
// Statistics by day.
$resultset = $this->influx->query('SELECT * FROM "1d"."station.' . $station->getId() . '.listeners" WHERE time > now() - 30d',
[
'epoch' => 'ms',
]);
$dailyStats = $this->analyticsRepo->findForStationAfterTime($station, $statisticsThreshold,
Entity\Analytics::INTERVAL_DAILY);
$daily_chart = new stdClass;
$daily_chart->label = __('Listeners by Day');
@ -66,20 +63,22 @@ class OverviewController
$days_of_week = [];
foreach ($resultset->getPoints() as $stat) {
foreach ($dailyStats as $stat) {
/** @var CarbonImmutable $statTime */
$statTime = $stat['moment'];
$statTime = $statTime->shiftTimezone($station_tz);
$avg_row = new stdClass;
$avg_row->t = $stat['time'];
$avg_row->y = round($stat['value'], 2);
$avg_row->t = $statTime->getTimestamp() * 1000;
$avg_row->y = round($stat['number_avg'], 2);
$daily_averages[] = $avg_row;
$dt = CarbonImmutable::createFromTimestamp($avg_row->t / 1000, $station_tz);
$row_date = $dt->format('Y-m-d');
$row_date = $statTime->format('Y-m-d');
$daily_alt[] = '<dt><time data-original="' . $avg_row->t . '">' . $row_date . '</time></dt>';
$daily_alt[] = '<dd>' . $avg_row->y . ' ' . __('Listeners') . '</dd>';
$day_of_week = (int)$dt->format('N') - 1;
$days_of_week[$day_of_week][] = $stat['value'];
$day_of_week = (int)$statTime->format('N') - 1;
$days_of_week[$day_of_week][] = $stat['number_avg'];
}
$daily_alt[] = '</dl>';
@ -128,19 +127,18 @@ class OverviewController
];
// Statistics by hour.
$resultset = $this->influx->query('SELECT * FROM "1h"."station.' . $station->getId() . '.listeners"', [
'epoch' => 'ms',
]);
$hourly_stats = $resultset->getPoints();
$hourlyStats = $this->analyticsRepo->findForStationAfterTime($station, $statisticsThreshold,
Entity\Analytics::INTERVAL_HOURLY);
$totals_by_hour = [];
foreach ($hourly_stats as $stat) {
$dt = CarbonImmutable::createFromTimestamp($stat['time'] / 1000, $station_tz);
foreach ($hourlyStats as $stat) {
/** @var CarbonImmutable $statTime */
$statTime = $stat['moment'];
$statTime = $statTime->shiftTimezone($station_tz);
$hour = (int)$dt->format('G');
$totals_by_hour[$hour][] = $stat['value'];
$hour = (int)$statTime->format('G');
$totals_by_hour[$hour][] = $stat['number_avg'];
}
$hourly_labels = [];
@ -182,7 +180,7 @@ class OverviewController
GROUP BY sh.song_id
ORDER BY records DESC')
->setParameter('station_id', $station->getId())
->setParameter('timestamp', $statisticsThreshold)
->setParameter('timestamp', $statisticsThreshold->getTimestamp())
->setMaxResults(40)
->getArrayResult();

View File

@ -1,6 +1,9 @@
<?php
namespace App\Entity;
use Carbon\CarbonImmutable;
use DateTimeInterface;
use DateTimeZone;
use Doctrine\ORM\Mapping as ORM;
/**
@ -20,6 +23,9 @@ class Analytics
/** @var string No analytics data collected of any sort. */
public const LEVEL_NONE = 'none';
public const INTERVAL_DAILY = 'day';
public const INTERVAL_HOURLY = 'hour';
/**
* @ORM\Column(name="id", type="integer")
* @ORM\Id
@ -34,6 +40,15 @@ class Analytics
*/
protected $station_id;
/**
* @ORM\ManyToOne(targetEntity="Station")
* @ORM\JoinColumns({
* @ORM\JoinColumn(name="station_id", referencedColumnName="id", onDelete="CASCADE")
* })
* @var Station|null
*/
protected $station;
/**
* @ORM\Column(name="type", type="string", length=15)
* @var string
@ -41,10 +56,10 @@ class Analytics
protected $type;
/**
* @ORM\Column(name="timestamp", type="integer")
* @var int
* @ORM\Column(name="moment", type="carbon_immutable", precision=0)
* @var CarbonImmutable
*/
protected $timestamp;
protected $moment;
/**
* @ORM\Column(name="number_min", type="integer")
@ -59,34 +74,43 @@ class Analytics
protected $number_max;
/**
* @ORM\Column(name="number_avg", type="integer")
* @var int
* @ORM\Column(name="number_avg", type="decimal", precision=10, scale=2)
* @var string
*/
protected $number_avg;
/**
* @ORM\ManyToOne(targetEntity="Station")
* @ORM\JoinColumns({
* @ORM\JoinColumn(name="station_id", referencedColumnName="id", onDelete="CASCADE")
* })
* @var Station|null
* @ORM\Column(name="number_unique", type="integer", nullable=true)
* @var int|null
*/
protected $station;
protected $number_unique;
public function __construct(
Station $station = null,
$type = 'day',
$timestamp = null,
$number_min = 0,
$number_max = 0,
$number_avg = 0
DateTimeInterface $moment,
?Station $station = null,
$type = self::INTERVAL_DAILY,
int $number_min = 0,
int $number_max = 0,
float $number_avg = 0,
?int $number_unique = null
) {
$utc = new DateTimeZone('UTC');
$moment = CarbonImmutable::parse($moment, $utc);
$this->moment = $moment->shiftTimezone($utc);
$this->station = $station;
$this->type = $type;
$this->timestamp = $timestamp ?? time();
$this->number_min = $number_min;
$this->number_max = $number_max;
$this->number_avg = $number_avg;
$this->number_avg = (string)round($number_avg, 2);
$this->number_unique = $number_unique;
}
public function getStation(): ?Station
{
return $this->station;
}
public function getId(): ?int
@ -99,9 +123,16 @@ class Analytics
return $this->type;
}
public function getTimestamp(): int
public function getMoment(): CarbonImmutable
{
return $this->timestamp;
return $this->moment;
}
public function getMomentInStationTimeZone(): CarbonImmutable
{
$tz = $this->station->getTimezoneObject();
$timestamp = CarbonImmutable::parse($this->moment, $tz);
return $timestamp->shiftTimezone($tz);
}
public function getNumberMin(): int
@ -114,13 +145,13 @@ class Analytics
return $this->number_max;
}
public function getNumberAvg(): int
public function getNumberAvg(): float
{
return $this->number_avg;
return round((float)$this->number_avg, 2);
}
public function getStation(): ?Station
public function getNumberUnique(): ?int
{
return $this->station;
return $this->number_unique;
}
}
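To make the new entity semantics concrete: the constructor pins the supplied moment to UTC and stores the average as a DECIMAL(10,2) string, which getNumberAvg() converts back to a rounded float. An illustrative construction (the $station variable is assumed to be an existing App\Entity\Station; the numbers are made up):

<?php
use App\Entity;
use Carbon\CarbonImmutable;

$row = new Entity\Analytics(
    CarbonImmutable::parse('2020-10-06 00:00:00', 'UTC'),
    $station,
    Entity\Analytics::INTERVAL_DAILY,
    3,        // number_min
    48,       // number_max
    17.4567,  // number_avg; rounded to "17.46" when stored
    210       // number_unique
);
echo $row->getMoment()->toDateTimeString(); // 2020-10-06 00:00:00 (UTC)
echo $row->getNumberAvg();                  // 17.46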

View File

@ -0,0 +1,72 @@
<?php
namespace App\Entity\Fixture;
use App\Entity;
use Carbon\CarbonImmutable;
use Doctrine\Common\DataFixtures\AbstractFixture;
use Doctrine\Common\DataFixtures\DependentFixtureInterface;
use Doctrine\Persistence\ObjectManager;
class Analytics extends AbstractFixture implements DependentFixtureInterface
{
public function load(ObjectManager $em)
{
$stations = $em->getRepository(Entity\Station::class)->findAll();
$midnight_utc = CarbonImmutable::now('UTC')->setTime(0, 0);
for ($i = 1; $i <= 14; $i++) {
$day = $midnight_utc->subDays($i);
$day_min = 0;
$day_max = 0;
$day_listeners = 0;
$day_unique = 0;
foreach ($stations as $station) {
/** @var Entity\Station $station */
$station_listeners = random_int(10, 50);
$station_min = random_int(1, $station_listeners);
$station_max = random_int($station_listeners, 150);
$station_unique = random_int(1, 250);
$day_min = min($day_min, $station_min);
$day_max = max($day_max, $station_max);
$day_listeners += $station_listeners;
$day_unique += $station_unique;
$stationPoint = new Entity\Analytics(
$day,
$station,
Entity\Analytics::INTERVAL_DAILY,
$station_min,
$station_max,
$station_listeners,
$station_unique
);
$em->persist($stationPoint);
}
$totalPoint = new Entity\Analytics(
$day,
null,
Entity\Analytics::INTERVAL_DAILY,
$day_min,
$day_max,
$day_listeners,
$day_unique
);
$em->persist($totalPoint);
}
$em->flush();
}
public function getDependencies()
{
return [
Station::class,
];
}
}

View File

@ -0,0 +1,45 @@
<?php
declare(strict_types=1);
namespace App\Entity\Migration;
use Doctrine\DBAL\Schema\Schema;
use Doctrine\Migrations\AbstractMigration;
/**
* Auto-generated Migration: Please modify to your needs!
*/
final class Version20201006044905 extends AbstractMigration
{
public function getDescription(): string
{
return 'Analytics database improvements.';
}
public function up(Schema $schema): void
{
$this->addSql('DROP INDEX search_idx ON analytics');
$this->addSql('ALTER TABLE analytics ADD moment DATETIME(0) NOT NULL COMMENT \'(DC2Type:carbon_immutable)\', CHANGE number_avg number_avg NUMERIC(10, 2) NOT NULL, ADD number_unique INT');
$this->addSql('UPDATE analytics SET moment=FROM_UNIXTIME(timestamp)');
$this->addSql('ALTER TABLE analytics DROP timestamp');
$this->addSql('CREATE INDEX search_idx ON analytics (type, moment)');
}
public function down(Schema $schema): void
{
$this->addSql('DROP INDEX search_idx ON analytics');
$this->addSql('ALTER TABLE analytics ADD timestamp INT NOT NULL');
$this->addSql('UPDATE analytics SET timestamp=UNIX_TIMESTAMP(moment)');
$this->addSql('ALTER TABLE analytics DROP moment, DROP number_unique, CHANGE number_avg number_avg INT NOT NULL');
$this->addSql('CREATE INDEX search_idx ON analytics (type, timestamp)');
}
}

View File

@ -0,0 +1,41 @@
<?php
namespace App\Entity\Repository;
use App\Doctrine\Repository;
use App\Entity;
use DateTimeInterface;
class AnalyticsRepository extends Repository
{
public function findForStationAfterTime(
Entity\Station $station,
DateTimeInterface $threshold,
string $type = Entity\Analytics::INTERVAL_DAILY
): array {
return $this->em->createQuery(/** @lang DQL */ 'SELECT a
FROM App\Entity\Analytics a
WHERE a.station = :station
AND a.type = :type
AND a.moment >= :threshold')
->setParameter('station', $station)
->setParameter('type', $type)
->setParameter('threshold', $threshold)
->getArrayResult();
}
public function clearAllAfterTime(
DateTimeInterface $threshold
): void {
$this->em->createQuery(/** @lang DQL */ 'DELETE FROM App\Entity\Analytics a WHERE a.moment >= :threshold')
->setParameter('threshold', $threshold)
->execute();
}
public function clearAll(): void
{
$this->em->createQuery(/** @lang DQL */ 'DELETE FROM App\Entity\Analytics a')
->execute();
}
}
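A hedged usage sketch of the new repository; $analyticsRepo and $station are assumed to be injected from the DI container, as in the controllers elsewhere in this commit:

<?php
use App\Entity;
use Carbon\CarbonImmutable;

// Illustrative: fetch the last month of hourly rows for one station.
$rows = $analyticsRepo->findForStationAfterTime(
    $station,
    CarbonImmutable::parse('-1 month'),
    Entity\Analytics::INTERVAL_HOURLY
);
foreach ($rows as $row) {
    // getArrayResult() hydrates 'moment' through the carbon_immutable DBAL type,
    // so it arrives as a CarbonImmutable; number_avg comes back as a DECIMAL string.
    echo $row['moment']->format('Y-m-d H:i') . ': ' . $row['number_avg'] . ' avg listeners' . PHP_EOL;
}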

View File

@ -3,6 +3,7 @@ namespace App\Entity\Repository;
use App\Doctrine\Repository;
use App\Entity;
use DateTimeInterface;
use NowPlaying\Result\Client;
class ListenerRepository extends Repository
@ -11,22 +12,29 @@ class ListenerRepository extends Repository
* Get the number of unique listeners for a station during a specified time period.
*
* @param Entity\Station $station
* @param int $timestamp_start
* @param int $timestamp_end
* @param DateTimeInterface|int $start
* @param DateTimeInterface|int $end
*
* @return mixed
* @return int
*/
public function getUniqueListeners(Entity\Station $station, $timestamp_start, $timestamp_end)
public function getUniqueListeners(Entity\Station $station, $start, $end): int
{
return $this->em->createQuery(/** @lang DQL */ 'SELECT
if ($start instanceof DateTimeInterface) {
$start = $start->getTimestamp();
}
if ($end instanceof DateTimeInterface) {
$end = $end->getTimestamp();
}
return (int)$this->em->createQuery(/** @lang DQL */ 'SELECT
COUNT(DISTINCT l.listener_hash)
FROM App\Entity\Listener l
WHERE l.station_id = :station_id
AND l.timestamp_start <= :time_end
AND l.timestamp_end >= :time_start')
->setParameter('station_id', $station->getId())
->setParameter('time_end', $timestamp_end)
->setParameter('time_start', $timestamp_start)
->setParameter('time_end', $end)
->setParameter('time_start', $start)
->getSingleScalarResult();
}
@ -78,4 +86,10 @@ class ListenerRepository extends Repository
->execute();
}
}
public function clearAll(): void
{
$this->em->createQuery(/** @lang DQL */ 'DELETE FROM App\Entity\Listener l')
->execute();
}
}

View File

@ -6,6 +6,7 @@ use App\Doctrine\Repository;
use App\Entity;
use App\Settings;
use Carbon\CarbonInterface;
use DateTimeInterface;
use Doctrine\ORM\EntityManagerInterface;
use Psr\Http\Message\UriInterface;
use Psr\Log\LoggerInterface;
@ -226,4 +227,38 @@ class SongHistoryRepository extends Repository
->setMaxResults(1)
->getOneOrNullResult();
}
/**
* @param Entity\Station $station
* @param int|DateTimeInterface $start
* @param int|DateTimeInterface $end
*
* @return array [$minimumListeners, $maximumListeners, $averageListeners]
*/
public function getStatsByTimeRange(Entity\Station $station, $start, $end): array
{
if ($start instanceof DateTimeInterface) {
$start = $start->getTimestamp();
}
if ($end instanceof DateTimeInterface) {
$end = $end->getTimestamp();
}
$historyTotals = $this->em->createQuery(/** @lang DQL */ '
SELECT AVG(sh.listeners_end) AS listeners_avg, MAX(sh.listeners_end) AS listeners_max, MIN(sh.listeners_end) AS listeners_min
FROM App\Entity\SongHistory sh
WHERE sh.station = :station
AND sh.timestamp_end >= :start
AND sh.timestamp_start <= :end')
->setParameter('station', $station)
->setParameter('start', $start)
->setParameter('end', $end)
->getSingleResult();
$min = (int)$historyTotals['listeners_min'];
$max = (int)$historyTotals['listeners_max'];
$avg = round((float)$historyTotals['listeners_avg'], 2);
return [$min, $max, $avg];
}
}
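The returned triple is meant to be destructured, as the rewritten Analytics sync task below does. A one-line usage sketch ($historyRepo, $station and $hourStart are assumed):

<?php
// Illustrative: listener stats for one station over a single hour.
[$min, $max, $avg] = $historyRepo->getStatsByTimeRange($station, $hourStart, $hourStart->addHour());
printf('min %d / max %d / avg %.2f listeners%s', $min, $max, $avg, PHP_EOL);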

View File

@ -29,8 +29,6 @@ class Stations
$backend = $request->getStationBackend();
$frontend = $request->getStationFrontend();
date_default_timezone_set($station->getTimezone());
$view->addData([
'station' => $station,
'frontend' => $frontend,

View File

@ -1,75 +0,0 @@
<?php
namespace App\Service;
use Doctrine\DBAL\Connection;
use Doctrine\ORM\EntityManagerInterface;
use Exception;
use InfluxDB;
use Redis;
class UptimeWait
{
protected Connection $db;
protected Redis $redis;
protected InfluxDB\Client $influx;
public function __construct(EntityManagerInterface $em, Redis $redis, InfluxDB\Database $influx)
{
$this->db = $em->getConnection();
$this->redis = $redis;
$this->influx = $influx->getClient();
}
public function waitForAll(): void
{
$this->waitForRedis();
$this->waitForInflux();
$this->waitForDatabase();
}
public function waitForDatabase(): void
{
$this->attempt(function () {
$this->db->connect();
});
}
public function waitForInflux(): void
{
$this->attempt(function () {
$this->influx->listDatabases();
});
}
public function waitForRedis(): void
{
$this->attempt(function () {
$this->redis->ping();
});
}
protected function attempt(callable $run)
{
$attempt = 0;
$maxAttempts = 10;
$baseWaitTime = 100;
$lastException = null;
while ($attempt < $maxAttempts) {
$waitTime = ($attempt ** 2) * $baseWaitTime;
usleep($waitTime * 1000);
$attempt++;
try {
return $run();
} catch (Exception $e) {
$lastException = $e;
}
}
throw $lastException;
}
}

View File

@ -2,123 +2,213 @@
namespace App\Sync\Task;
use App\Entity;
use Carbon\CarbonImmutable;
use Doctrine\ORM\EntityManagerInterface;
use InfluxDB\Database;
use Psr\Log\LoggerInterface;
class Analytics extends AbstractTask
{
protected Database $influx;
protected Entity\Repository\AnalyticsRepository $analyticsRepo;
protected Entity\Repository\ListenerRepository $listenerRepo;
protected Entity\Repository\SongHistoryRepository $historyRepo;
public function __construct(
EntityManagerInterface $em,
Entity\Repository\SettingsRepository $settingsRepo,
LoggerInterface $logger,
Database $influx
Entity\Repository\AnalyticsRepository $analyticsRepo,
Entity\Repository\ListenerRepository $listenerRepo,
Entity\Repository\SongHistoryRepository $historyRepo
) {
parent::__construct($em, $settingsRepo, $logger);
$this->influx = $influx;
$this->analyticsRepo = $analyticsRepo;
$this->listenerRepo = $listenerRepo;
$this->historyRepo = $historyRepo;
}
public function run(bool $force = false): void
{
$analytics_level = $this->settingsRepo->getSetting('analytics', Entity\Analytics::LEVEL_ALL);
if ($analytics_level === Entity\Analytics::LEVEL_NONE) {
$this->purgeAnalytics();
$this->purgeListeners();
} elseif ($analytics_level === Entity\Analytics::LEVEL_NO_IP) {
$this->purgeListeners();
} else {
$this->clearOldAnalytics();
switch ($analytics_level) {
case Entity\Analytics::LEVEL_NONE:
$this->purgeListeners();
$this->purgeAnalytics();
break;
case Entity\Analytics::LEVEL_NO_IP:
$this->purgeListeners();
$this->updateAnalytics(false);
break;
case Entity\Analytics::LEVEL_ALL:
$this->updateAnalytics(true);
break;
}
}
protected function updateAnalytics(bool $withListeners = true): void
{
$stationsRaw = $this->em->getRepository(Entity\Station::class)
->findAll();
/** @var Entity\Station[] $stations */
$stations = [];
foreach ($stationsRaw as $station) {
/** @var Entity\Station $station */
$stations[$station->getId()] = $station;
}
$now = CarbonImmutable::now('UTC');
$day = $now->subDays(5)->setTime(0, 0);
// Clear existing analytics in this segment
$this->analyticsRepo->clearAllAfterTime($day);
while ($day < $now) {
$dailyUniqueListeners = null;
for ($hour = 0; $hour <= 23; $hour++) {
$hourUtc = $day->setTime($hour, 0);
$hourlyMin = 0;
$hourlyMax = 0;
$hourlyAverage = 0;
$hourlyUniqueListeners = null;
$hourlyStationRows = [];
foreach ($stations as $stationId => $station) {
$stationTz = $station->getTimezoneObject();
$start = $hourUtc->shiftTimezone($stationTz);
$end = $start->addHour();
[$min, $max, $avg] = $this->historyRepo->getStatsByTimeRange($station, $start, $end);
$unique = null;
if ($withListeners) {
$unique = $this->listenerRepo->getUniqueListeners($station, $start, $end);
$hourlyUniqueListeners ??= 0;
$hourlyUniqueListeners += $unique;
}
$hourlyRow = new Entity\Analytics(
$hourUtc,
$station,
Entity\Analytics::INTERVAL_HOURLY,
$min,
$max,
$avg,
$unique
);
$hourlyStationRows[$stationId][] = $hourlyRow;
$this->em->persist($hourlyRow);
$hourlyMin = min($hourlyMin, $min);
$hourlyMax = max($hourlyMax, $max);
$hourlyAverage += $avg;
}
// Post the all-stations hourly totals.
$hourlyAllStationsRow = new Entity\Analytics(
$hourUtc,
null,
Entity\Analytics::INTERVAL_HOURLY,
$hourlyMin,
$hourlyMax,
$hourlyAverage,
$hourlyUniqueListeners
);
$hourlyStationRows['all'][] = $hourlyAllStationsRow;
$this->em->persist($hourlyAllStationsRow);
}
// Aggregate daily totals.
$dailyMin = 0;
$dailyMax = 0;
$dailyAverages = [];
$dailyUniqueListeners = null;
foreach ($stations as $stationId => $station) {
$stationTz = $station->getTimezoneObject();
$stationDayStart = $day->shiftTimezone($stationTz);
$stationDayEnd = $stationDayStart->addDay();
$dailyStationMin = 0;
$dailyStationMax = 0;
$dailyStationAverages = [];
$hourlyRows = $hourlyStationRows[$stationId] ?? [];
foreach ($hourlyRows as $hourlyRow) {
/** @var Entity\Analytics $hourlyRow */
$dailyStationMin = min($dailyStationMin, $hourlyRow->getNumberMin());
$dailyStationMax = max($dailyStationMax, $hourlyRow->getNumberMax());
$dailyStationAverages[] = $hourlyRow->getNumberAvg();
}
$dailyMin = min($dailyMin, $dailyStationMin);
$dailyMax = max($dailyMax, $dailyStationMax);
$dailyStationUnique = null;
if ($withListeners) {
$dailyStationUnique = $this->listenerRepo->getUniqueListeners($station, $stationDayStart,
$stationDayEnd);
$dailyUniqueListeners ??= 0;
$dailyUniqueListeners += $dailyStationUnique;
}
$dailyStationAverage = round(array_sum($dailyStationAverages) / count($dailyStationAverages), 2);
$dailyAverages[] = $dailyStationAverage;
$dailyStationRow = new Entity\Analytics(
$day,
$station,
Entity\Analytics::INTERVAL_DAILY,
$dailyStationMin,
$dailyStationMax,
$dailyStationAverage,
$dailyStationUnique
);
$this->em->persist($dailyStationRow);
}
// Post the all-stations daily total.
$dailyAverage = round(array_sum($dailyAverages) / count($dailyAverages), 2);
$dailyAllStationsRow = new Entity\Analytics(
$day,
null,
Entity\Analytics::INTERVAL_DAILY,
$dailyMin,
$dailyMax,
$dailyAverage,
$dailyUniqueListeners
);
$this->em->persist($dailyAllStationsRow);
$this->em->flush();
// Loop to the next day.
$day = $day->addDay();
}
}
protected function purgeAnalytics(): void
{
$this->em->createQuery(/** @lang DQL */ 'DELETE FROM App\Entity\Analytics a')
->execute();
$this->influx->query('DROP SERIES FROM /.*/');
$this->analyticsRepo->clearAll();
}
protected function purgeListeners(): void
{
$this->em->createQuery(/** @lang DQL */ 'DELETE FROM App\Entity\Listener l')
->execute();
}
protected function clearOldAnalytics(): void
{
// Clear out any non-daily statistics.
$this->em->createQuery(/** @lang DQL */ 'DELETE FROM App\Entity\Analytics a WHERE a.type != :type')
->setParameter('type', 'day')
->execute();
// Pull statistics in from influx.
$resultset = $this->influx->query('SELECT * FROM "1d"./.*/ WHERE time > now() - 14d', [
'epoch' => 's',
]);
$results_raw = $resultset->getSeries();
$results = [];
foreach ($results_raw as $serie) {
$points = [];
foreach ($serie['values'] as $point) {
$points[] = array_combine($serie['columns'], $point);
}
$results[$serie['name']] = $points;
}
$new_records = [];
$earliest_timestamp = time();
foreach ($results as $stat_series => $stat_rows) {
$series_split = explode('.', $stat_series);
$station_id = ($series_split[1] === 'all') ? null : $series_split[1];
foreach ($stat_rows as $stat_row) {
if ($stat_row['time'] < $earliest_timestamp) {
$earliest_timestamp = $stat_row['time'];
}
$new_records[] = [
'station_id' => $station_id,
'type' => 'day',
'timestamp' => $stat_row['time'],
'number_min' => (int)$stat_row['min'],
'number_max' => (int)$stat_row['max'],
'number_avg' => round($stat_row['value']),
];
}
}
$this->em->createQuery(/** @lang DQL */ 'DELETE FROM App\Entity\Analytics a WHERE a.timestamp >= :earliest')
->setParameter('earliest', $earliest_timestamp)
->execute();
$all_stations = $this->em->getRepository(Entity\Station::class)->findAll();
$stations_by_id = [];
foreach ($all_stations as $station) {
$stations_by_id[$station->getId()] = $station;
}
foreach ($new_records as $row) {
if (empty($row['station_id']) || isset($stations_by_id[$row['station_id']])) {
$record = new Entity\Analytics(
$row['station_id'] ? $stations_by_id[$row['station_id']] : null,
$row['type'],
$row['timestamp'],
$row['number_min'],
$row['number_max'],
$row['number_avg']
);
$this->em->persist($record);
}
}
$this->em->flush();
$this->listenerRepo->clearAll();
}
}
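As a plain-arithmetic illustration of the roll-up performed above: hourly figures come from SongHistory, a station's daily average is the mean of its hourly averages, and the all-stations daily average is the mean of the per-station daily averages. The numbers below are made up:

<?php
// Toy roll-up with fabricated figures for two stations over a (shortened) day.
$hourlyAvgsByStation = [
    1 => [12.0, 16.0, 10.0, 22.0],
    2 => [4.0, 6.0, 8.0, 2.0],
];
$dailyAverages = [];
foreach ($hourlyAvgsByStation as $stationId => $hourlyAvgs) {
    // Per-station daily average = mean of that station's hourly averages.
    $dailyAverages[$stationId] = round(array_sum($hourlyAvgs) / count($hourlyAvgs), 2);
}
// All-stations daily average = mean of the per-station daily averages.
$dailyAllStations = round(array_sum($dailyAverages) / count($dailyAverages), 2);
print_r($dailyAverages);         // station 1 => 15, station 2 => 5
echo $dailyAllStations, PHP_EOL; // 10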

View File

@ -16,12 +16,21 @@ class HistoryCleanup extends AbstractTask
->subDays($days_to_keep)
->getTimestamp();
// Clear Song History
$this->em->createQuery(/** @lang DQL */ 'DELETE
FROM App\Entity\SongHistory sh
WHERE sh.timestamp_start != 0
AND sh.timestamp_start <= :threshold')
->setParameter('threshold', $threshold)
->execute();
// Clear Listeners
$this->em->createQuery(/** @lang DQL */ 'DELETE
FROM App\Entity\Listener l
WHERE l.timestamp_start <= :threshold
AND l.timestamp_end IS NOT NULL')
->setParameter('threshold', $threshold)
->execute();
}
}
}

View File

@ -14,8 +14,6 @@ use App\Settings;
use Doctrine\ORM\EntityManagerInterface;
use Exception;
use GuzzleHttp\Psr7\Uri;
use InfluxDB\Database;
use InfluxDB\Point;
use Monolog\Logger;
use NowPlaying\Result\Result;
use Psr\Log\LoggerInterface;
@ -29,8 +27,6 @@ use function DeepCopy\deep_copy;
class NowPlaying extends AbstractTask implements EventSubscriberInterface
{
protected Database $influx;
protected CacheInterface $cache;
protected Adapters $adapters;
@ -59,7 +55,6 @@ class NowPlaying extends AbstractTask implements EventSubscriberInterface
ApiUtilities $api_utils,
AutoDJ $autodj,
CacheInterface $cache,
Database $influx,
LoggerInterface $logger,
EventDispatcher $event_dispatcher,
MessageBus $messageBus,
@ -77,7 +72,6 @@ class NowPlaying extends AbstractTask implements EventSubscriberInterface
$this->cache = $cache;
$this->event_dispatcher = $event_dispatcher;
$this->messageBus = $messageBus;
$this->influx = $influx;
$this->lockFactory = $lockFactory;
$this->history_repo = $historyRepository;
@ -105,38 +99,6 @@ class NowPlaying extends AbstractTask implements EventSubscriberInterface
{
$nowplaying = $this->_loadNowPlaying($force);
// Post statistics to InfluxDB.
if ($this->analytics_level !== Entity\Analytics::LEVEL_NONE) {
$influx_points = [];
$total_overall = 0;
foreach ($nowplaying as $info) {
$listeners = (int)$info->listeners->current;
$total_overall += $listeners;
$station_id = $info->station->id;
$influx_points[] = new Point(
'station.' . $station_id . '.listeners',
$listeners,
[],
['station' => $station_id],
time()
);
}
$influx_points[] = new Point(
'station.all.listeners',
$total_overall,
[],
['station' => 0],
time()
);
$this->influx->writePoints($influx_points, Database::PRECISION_SECONDS);
}
$this->cache->set(Entity\Settings::NOWPLAYING, $nowplaying, 120);
$this->settingsRepo->setSetting(Entity\Settings::NOWPLAYING, $nowplaying);
}

View File

@ -6,8 +6,6 @@ use App\Event\SendWebhooks;
use App\Service\NChan;
use App\Settings;
use GuzzleHttp\Client;
use InfluxDB\Database;
use InfluxDB\Point;
use Monolog\Logger;
use Psr\SimpleCache\CacheInterface;
use RuntimeException;
@ -21,8 +19,6 @@ class LocalWebhookHandler
protected Logger $logger;
protected Database $influx;
protected CacheInterface $cache;
protected Entity\Repository\SettingsRepository $settingsRepo;
@ -30,13 +26,11 @@ class LocalWebhookHandler
public function __construct(
Logger $logger,
Client $httpClient,
Database $influx,
CacheInterface $cache,
Entity\Repository\SettingsRepository $settingsRepo
) {
$this->logger = $logger;
$this->httpClient = $httpClient;
$this->influx = $influx;
$this->cache = $cache;
$this->settingsRepo = $settingsRepo;
}
@ -47,19 +41,6 @@ class LocalWebhookHandler
$station = $event->getStation();
if ($event->isStandalone()) {
$this->logger->debug('Writing entry to InfluxDB...');
// Post statistics to InfluxDB.
$influx_point = new Point(
'station.' . $station->getId() . '.listeners',
(int)$np->listeners->current,
[],
['station' => $station->getId()],
time()
);
$this->influx->writePoints([$influx_point], Database::PRECISION_SECONDS);
// Replace the relevant station information in the cache and database.
$this->logger->debug('Updating NowPlaying cache...');

View File

@ -15,7 +15,7 @@ done
if [[ "$1" == '--' ]]; then shift; fi
APP_ENV="${APP_ENV:-production}"
UPDATE_REVISION="${UPDATE_REVISION:-57}"
UPDATE_REVISION="${UPDATE_REVISION:-58}"
PKG_OK=$(dpkg-query -W --showformat='${Status}\n' ansible | grep "install ok installed")
echo "Checking for Ansible: $PKG_OK"

View File

@ -26,7 +26,6 @@
- ufw
- dbip
- composer
- influxdb
- services
- azuracast-build
- azuracast-setup

View File

@ -1,41 +1,13 @@
---
- name: Add InfluxDB repo file
apt_repository:
repo: "deb https://repos.influxdata.com/ubuntu {{ ansible_distribution_release | lower }} stable"
filename: "influxdb"
update_cache: no
register: influxdb_list
- name: Shut Down InfluxDB
service:
name: "influxdb"
state: stopped
ignore_errors: True
- name: Add InfluxDB repo key
apt_key:
url: https://repos.influxdata.com/influxdb.key
state: present
register: influxdb_key
- name: Update apt cache
apt: update_cache=yes
when:
- influxdb_list.changed == True or influxdb_key.changed == True
- name: Install InfluxDB
- name: Remove InfluxDB if Present
apt:
name: influxdb
state: latest
register: influx_installed
- name: Fix permissions on InfluxDB data folder
file: path="/var/lib/influxdb" state=directory owner=influxdb group=influxdb recurse=true
- name: Lock down InfluxDB to localhost requests
replace:
dest: /etc/influxdb/influxdb.conf
regexp: 'bind-address = ":80'
replace: 'bind-address = "localhost:80'
backup: yes
when: app_env == "production"
- name: Start the InfluxDB service
service: name=influxdb state=restarted enabled=yes
- name: Pause to allow InfluxDB to spin up
wait_for: port=8086 delay=5
name: "influxdb"
state: absent
force: yes
purge: yes

View File

@ -9,11 +9,7 @@
enabled: yes
state: restarted
with_items:
- "influxdb"
- "mysql"
- "php7.4-fpm"
- "nginx"
- "{{ redis_service_name }}"
- name: Pause to allow InfluxDB to spin up
wait_for: port=8086 delay=5
- "{{ redis_service_name }}"

View File

@ -20,7 +20,7 @@
- { role: redis, when: update_revision|int < 57 }
- { role: php, when: update_revision|int < 50 }
- composer
- { role: influxdb, when: update_revision|int < 56 }
- { role: influxdb, when: update_revision|int < 58 }
- { role: ufw, when: update_revision|int < 12 }
- { role: dbip, when: update_revision|int < 51 }
- { role: services, when: update_revision|int < 13 }

View File

@ -0,0 +1,10 @@
#!/bin/bash
set -e
if [[ -f /var/azuracast/www/bin/uptime_wait ]]; then
if ! php /var/azuracast/www/bin/uptime_wait; then
exit 1
fi
fi
exec "$@"

View File

@ -1,11 +0,0 @@
#!/bin/bash
set -e
source /bd_build/buildconfig
set -x
# wget -qO- https://repos.influxdata.com/influxdb.key | apt-key add -
# echo "deb https://repos.influxdata.com/ubuntu bionic stable" | tee /etc/apt/sources.list.d/influxdb.list
apt-get update
$minimal_apt_get_install influxdb