mirror of https://gitlab.com/octospacc/Configs.git (synced 2025-03-12 08:40:16 +01:00)

[Server] new system JS scripts

This commit is contained in:
parent 88d620b98f
commit 9c2cee4061
@@ -6,7 +6,7 @@ h=home/tux
SetScope Root
mkcd ./Root
CpSufx etc/ diycron
CpSufx etc/ diycron diycron.zx.mjs

for f in \
diycron ncshell OneShot.AfterBoot bittorrentd SocatIpProxies \
@@ -26,14 +26,14 @@ mkcd ./Root
; do CpItem "etc/systemd/system/Vm${f}.service"
done

CpSufx "Main/Server/Scripts/*." sh
CpSufx "Main/Server/Scripts/*." sh mjs
CpSufx "Main/Server/Scripts/Backup/*." sh cfg
CpItem Main/Server/Scripts/Interactive
#CpItem Main/Server/Scripts/OneShot.AfterBoot.sh
#CpItem Main/Server/Scripts/RenewCerts.sh

#CpItem Main/Server/Start/bittorrentd
#CpItem Main/Transfers/aria2/Conf
CpItem Main/Server/Start/aria2c
CpItem Main/Transfers/aria2/Conf

ScopePath=/var/lib/lxc/Debian2023/rootfs/
CpItem etc/nginx/nginx.conf
@@ -1,10 +1,6 @@
#!/bin/sh

BackupsBase=/Main/Backup
#BackupsBase="/media/Disk/Backup"

# ScriptDir="$( dirname "$( realpath "$0" )" )"
# cd "$ScriptDir"

RunDate="$(date +%F)" # Current date in YYYY-MM-DD format
cd "${BackupsBase}"
@@ -51,7 +51,9 @@ ServerBackupLimited(){
#BackPathCrypt "FreshRSS-data" "${BackupKey_Git_FreshRSS}"
#BackPathCrypt "shiori-data" "${BackupKey_Git_Shiori}"
BackPathCrypt n8n-data "${BackupKey_Git_n8n}"
BackPathCrypt script-server "${BackupKey_Git_scriptserver}"
# "${BackupKey_Git_aria2}" ".7z"
BackPathCrypt docker-mailserver "${BackupKey_Git_dockermailserver}"
GitPush || true
EchoExec cd ..
}
@@ -84,10 +86,8 @@ DoSpaccCraftBackup(){
McEdition="Beta-1.7.3"
McGit="spacccraft-b1.7.3-backup4"
DestPath="${BackupsBase}/${McGit}"
#DestPath="${BackupsBase}/${McServer}/${McGit}"
if [ -d "${DestPath}" ]
then
#cd "/Server/${McServer}"
cd "${BackupsBase}/${McServer}"
rm -rf "${DestPath}/${McEdition}" || true
cp ./*.sh "${DestPath}/" || true
@@ -14,5 +14,6 @@ InvidiousPersonalJsonDump(){
| 7z a -mmt1 -mx9 "./${Name}/${RunDate}.7z" -si && cp -v "./${Name}/${RunDate}.7z" "./${Name}/Latest.7z"
}

#InvidiousPersonalJsonDump
#InvidiousPersonalJsonDump || true

WriteLastLog
@@ -39,6 +39,8 @@ DoBackupSpaccBBS(){
#SimpleBackup "FreshRSS-data"
SimpleBackup FreshRSS www
SimpleBackup n8n-data
SimpleBackup script-server
SimpleBackup docker-mailserver
DoBackupShiori
DoBackupSpaccBBS
Server/Root/Main/Server/Scripts/BackupAll.zx.mjs (new executable file, 100 lines)
@@ -0,0 +1,100 @@
#!/usr/bin/env zx

let BackupsBase = '/Main/Backup';
let Time = new Date();

// Note: not padding the year to 5 digits will break the scripts in ~8 millenia. The line should be fixed.
Time.Stamp = `${Time.getFullYear()}-${(Time.getMonth() + 1).toString().padStart(2, '0')}-${Time.getDate().toString().padStart(2, '0')}`;
cd(BackupsBase);

let [Jobs, Secrets] = [{}, {}];

// Import secrets from sh-formatted file
for (let line of (await fs.readFile('./.BackupSecrects.sec', 'utf8')).split('\n')) {
	line = line.split('#')[0].trim();
	const key = line.split('=')[0];
	let val = line.split('=').slice(1).join('=');
	if ((val.startsWith('"') && val.endsWith('"')) || (val.startsWith("'") && val.endsWith("'"))) {
		val = val.slice(1, -1);
	};
	Secrets[key] = val;
};
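// For illustration (not part of the original script): the .BackupSecrects.sec file parsed above
// is expected to hold plain sh-style assignments, with optional quotes and '#' comments, using
// the key names referenced in the Cloud_* jobs below; example values only:
//   BackupKey_Git_n8n="some long passphrase"
//   BackupKey_Git_scriptserver='another passphrase'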

const Hash2 = async (BaseKey, Salt) => await $`echo "$(echo "${BaseKey}$(echo ${Salt} | sha512sum | base64 -w0)" | sha512sum | base64 -w0)"`.toString();

const ccencryptNow = async (File, BaseKey) => {
	await $`ccrypt -e -f -K"${Hash2(BaseKey, Time.Stamp)}" ${File}`;
	$`echo ${Time.Stamp} > ${File}.info`;
};

const GitPush = async () => $`git add . && git commit -m "Auto-Backup ${Time}" && git push`;

const BackPathCrypt = async (Folder, Key, Ext) => {
	Ext ||= '.tar.xz';
	const File = `${Folder}${Ext}`;
	await $`cp "../${Folder}/Latest${Ext}" "./${File}" && ccencryptNow "./${File}" "${Key}"`;
};

const SimpleCompress = async (Src, Dst) => await $`tar cJSf "${Dst}.tar.xz" ${Src}`;

const SimpleBackup = async (Folder, Prefix) => {
	await $`mkdir -vp "./${Folder}"`;
	// ...
};

const Work = async (Job) => await within(Jobs[Job]);

///////////////////////////////////////

Jobs.Local_Simple_Backup = async()=>{
	SimpleBackup('FreshRSS', 'www');
	SimpleBackup('n8n-data');
	SimpleBackup('script-server');
	SimpleBackup('docker-mailserver');
};

Jobs.Local_Shiori = async()=>{
	SimpleBackup('shiori-data', 'Shiori');
	$`rm -v ./shiori-data/Latest.d/archive/*`;
};

Jobs.Local_SpaccBBS = async()=>{

};

Jobs.Cloud_ServerBackupLimited = async()=>{
	cd('./Server-Backup-Limited');
	BackPathCrypt('FreshRSS', Secrets.BackupKey_Git_FreshRSS);
	BackPathCrypt('n8n-data', Secrets.BackupKey_Git_n8n);
	BackPathCrypt('script-server', Secrets.BackupKey_Git_scriptserver);
	BackPathCrypt('docker-mailserver', Secrets.BackupKey_Git_dockermailserver);
	GitPush();
};

Jobs.Cloud_ArticlesBackupPrivate = async()=>{
	cd('./Articles-Backup-Private');
	await $`rm -rf ./shiori-data`;
	await $`cp -rp ../shiori-data/Latest.d ./shiori-data`;
	GitPush();
};

Jobs.Cloud_SpaccBBS = async()=>{
	cd('./SpaccBBS-Backup-phpBB-2023');
};

Jobs.Cloud_SpaccCraft = async()=>{

};

///////////////////////////////////////

Work('Local_Simple_Backup');
Work('Local_Shiori');
Work('Local_SpaccBBS');

Work('Cloud_ServerBackupLimited');
Work('Cloud_ArticlesBackupPrivate');
Work('Cloud_SpaccBBS');
Work('Cloud_SpaccCraft');

$`echo ${Time.Stamp} > ${BackupsBase}/Last.log`;
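A note on the Work()/within() pattern above: zx's within() runs its callback in a fresh async context, so a cd() issued inside one job (as the Cloud_* jobs do) is not supposed to leak into the jobs that run afterwards. A minimal standalone sketch of that behaviour, with hypothetical paths, not taken from this script:

// Illustrative sketch of within()-based isolation (hypothetical paths, not part of the commit).
await within(async () => {
	cd('/tmp');    // changes directory only inside this async context
	await $`pwd`;  // prints /tmp
});
await $`pwd`;      // prints the directory the script started in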
@@ -11,8 +11,15 @@ done
#Backup \

chown -R 1000:1000 /Main/Server/Desktop
chown -R 101000:101000 /Main/Server/TelegramIndex-Fork
#chmod -R 7777 /Main/Server/Desktop

for Dir in \
Backup/SpaccCraft \
Server/TelegramIndex-Fork \
; do chown -R 101000:101000 "/Main/${Dir}"
done

chown -R 100033:100033 /Main/Server/www
#chmod -R 7777 /Main/Server/www
#chmod -R 775 /Main/Server/www
chmod -R 777 /Main/Server/www

chown -R tux:tux /Main/Clouds/octt/
@@ -11,12 +11,8 @@ Inputs = [Inputs];
for (let Input of Inputs) {
	let Ext = Input.split('.').slice(-1)[0];

	let Intermid = Input;
	//let Intermid = `${Input}-${Math.random()}.${Ext}`;
	//await fs.copy(Input, Intermid);

	//let Background = Input.split('.').slice(0,-1).join('.') + '.bg.' + Input.split('.').slice(-1)[0];
	let Background = `${Input}.bg.${Ext}`;
	await fs.copy(Input, Background);
@@ -31,31 +27,12 @@ for (let Input of Inputs) {

	let WhichPdf = await $`lp -d PDF -o scaling=${$.env.Scaling} -o position=${$.env.Position} ${Intermid}`;
	WhichPdf = WhichPdf.toString().split('request id is PDF-').slice(-1)[0].split(' ')[0];
	//WhichPdf = await $`s(){ $@ ;}; s $(s ${'ls ~/PDF/*-job_' + WhichPdf + '.pdf'})`;
	//await sleep(3000);
	//await $`ls ~/PDF/*-job_${WhichPdf}.pdf`;
	WhichPdf = await retry(999, expBackoff(), () => $`ls ~/PDF/*-job_${WhichPdf}.pdf`);
	WhichPdf = WhichPdf.toString().trim();
	//let ListPdf = await $`ls ~/PDF/*-job_*.pdf`;
	//for (let File of ListPdf.toString().split(' ')) {
	// echo`${File}`;
	// if (File.endsWith(`-job_${WhichPdf}.pdf`)) {
	// WhichPdf = File;
	// break;
	// };
	//};
	//echo`${WhichPdf}`;
	//let ListPdf = await $`ls "$HOME/PDF/"`;
	//echo`${ListPdf.toString().split(' ')}`;
	//ListPdf = ListPdf.toString().split(' ');
	//echo`1:${WhichPdf.toString()}`;
	//WhichPdf = await $`ls ~/PDF/*-job_${WhichPdf}.pdf`;
	//WhichPdf = await glob(`~/PDF/*-job_${WhichPdf}.pdf`);

	let OutputPdf = `${Output}.cups-pdf.pdf`;
	// TODO: Actually wait for file to be surely moved, don't use a constant wait
	await sleep(1500);
	//await fs.copy(WhichPdf, OutputPdf);
	//await fs.unlink(WhichPdf);
	await fs.rename(WhichPdf, OutputPdf);
	await sleep(1500);
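One way the TODO above could presumably be addressed (a sketch only, using the zx globals fs and sleep and the WhichPdf path resolved earlier; not what the script currently does): poll the CUPS-PDF output until its size stops changing, instead of sleeping for a fixed interval.

// Hypothetical replacement for the fixed sleep above (not part of the commit):
// wait until the PDF produced by cups-pdf stops growing before renaming it.
let LastSize = -1;
while (true) {
	const { size } = await fs.stat(WhichPdf);
	if (size > 0 && size === LastSize) break; // stable between polls: assume the writer is done
	LastSize = size;
	await sleep(250);
};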
@@ -1,9 +1,5 @@
#!/bin/sh
systemctl stop nginx
#cd /etc/letsencrypt/live/
#for Domain in *.octt.eu.org
#do
# certbot certonly --standalone -d $Domain
#done
certbot renew
systemctl start nginx
systemctl stop SocatIpProxies
#certbot renew
lxc-attach Debian2023 certbot renew
systemctl start SocatIpProxies
Server/Root/Main/Server/Start/aria2c (new executable file, 22 lines)
@@ -0,0 +1,22 @@
#!/bin/sh
BkRunDate="$(date "+%Y-%m-%d.%H:%M:%S")"
cd /Main/Transfers/aria2

# Backup
mkdir -p ./Working.bak.d
rm -rf ./Working.bak.d/* || true
cp -r ./Service ./Working.bak.d/Service
mkdir -p ./Working.bak.d/Meta
cp ../Storage/*.aria2 ./Working.bak.d/Meta/ || true
7z a -mx9 -mmt1 "./Working.bak.${BkRunDate}.7z" ./Working.bak.d
#rm ./Working.bak.Latest.7z || true
#ln -s "./Working.bak.${BkRunDate}.7z" ./Working.bak.Latest.7z

while true
do
# Get global trackers updated today
Trackers="$(for i in $(curl https://ngosang.github.io/trackerslist/trackers_best.txt; curl https://newtrackon.com/api/stable); do echo "$i,"; done)"
[ -n "$Trackers" ] && Trackers="--bt-tracker=$(echo $(echo "$Trackers") | sed 's/ //g')"

aria2c --conf-path=./Conf/Server.conf --daemon=false $Trackers
done
Server/Root/Main/Server/VMs/Lubuntu2022NonProxied.sh (new executable file, 21 lines)
@@ -0,0 +1,21 @@
#!/bin/sh
cd "$( dirname "$( realpath "$0" )" )"
while true; do

qemu-system-x86_64 \
-accel kvm \
-cpu host \
-smp 2 \
-m 700M \
-hda ./Lubuntu2022NonProxied.qcow2 \
-device e1000,netdev=net0 \
-netdev user,id=net0,hostfwd=tcp::33891-:3389,hostfwd=udp::33891-:3389,hostfwd=tcp::50991-:5900,hostfwd=udp::50991-:5900 \
-vnc :10 \
;
# -cdrom ./lubuntu-22.04.2-desktop-amd64.iso \
# -netdev user,id=net0,hostfwd=tcp::33891-:3389,hostfwd=udp::33891-:3389 \
# -vnc :10 \
# -display none \

sleep 500
done
@@ -1,5 +1,6 @@
#!/bin/sh
cd "$( dirname "$( realpath "$0" )" )"
while true; do

qemu-system-x86_64 \
-accel kvm \
@@ -13,4 +14,5 @@ qemu-system-x86_64 \
# -vnc :10 \
# -display none \

while true; do sleep 999; done
sleep 500
done
@@ -1,5 +1,6 @@
#!/bin/sh
cd "$( dirname "$( realpath "$0" )" )"
while true; do

#qemu-system-x86_64 \
/opt/usr/bin/qemu-system-x86_64 \
@@ -25,4 +26,5 @@ cd "$( dirname "$( realpath "$0" )" )"
# -vnc :10 \
# -display none \

while true; do sleep 999; done
sleep 999
done
@@ -5,7 +5,7 @@
# * https://gist.github.com/ifyour/2be0055adbaea83881aacaab905afd43

# Tip: load this file with: --conf-path=${FILE}
# Note: ensure files /Transfers/aria2/Service/{Session.dat,Cookies.txt} exist before starting, otherwise create as empty
# Note: ensure files /Main/Transfers/aria2/Service/{Session.dat,Cookies.txt} exist before starting, otherwise create as empty
@@ -16,23 +16,23 @@ daemon=false

# Moderate logging to file
log-level=notice
log=/Transfers/aria2/Service/Log.log
log=/Main/Transfers/aria2/Service/Log.log

# Remote control
enable-rpc=true
rpc-listen-all=true
rpc-listen-port=6800
rpc-allow-origin-all=true
rpc-secret=octt.pi
rpc-secret=octt.serbian

# Session data persistance
save-session=/Transfers/aria2/Service/Session.dat
input-file=/Transfers/aria2/Service/Session.dat
save-cookies=/Transfers/aria2/Service/Cookies.txt
load-cookies=/Transfers/aria2/Service/Cookies.txt
save-session=/Main/Transfers/aria2/Service/Session.dat
input-file=/Main/Transfers/aria2/Service/Session.dat
save-cookies=/Main/Transfers/aria2/Service/Cookies.txt
load-cookies=/Main/Transfers/aria2/Service/Cookies.txt

# Directory for storing downloads
dir=/Transfers/Storage
dir=/Main/Transfers/Storage

# Keep completed downloads in session (for seeding + stats)
force-save=true
@@ -41,11 +41,11 @@ force-save=true
allow-overwrite=true

# Cache storage
dht-file-path=/Transfers/aria2/Service/DHT.dat
dht-file-path=/Main/Transfers/aria2/Service/DHT.dat

# RAM Cache for reducing disk I/O - 2x default
# RAM Cache for reducing disk I/O - 4x default
# Note: if too high, some could end up in swap, and decrease performance
disk-cache=32MB
disk-cache=64MB

# Mapping files in memory consumes high ram
# Note: can have issues on 32-bit systems
@@ -79,8 +79,8 @@ bt-load-saved-metadata=true
bt-detach-seed-only=true

# Ports or ranges of - More ports avoids too much saturation (?)
listen-port=6881-6883
dht-listen-port=6881-6883
listen-port=6881
dht-listen-port=6881

# Peer discovery on the global Internet
enable-dht=true
@@ -31,16 +31,16 @@ do
# Inside here, declaration of all cronjobs like normal shell commands, made easy thanks to integrated functions.

# Trinity rotation backup system: each of the following scripts is executed every 3 days, in a rotation where at least 1 script runs every night at 3:00
Ifn $Job2 && If $(IsDayMin $(hm2s 3 0)) && test $(($(date +%s) / 86400 % 3)) = 0 && Job2=1 && sleep 60 && Do sh /Main/Server/Scripts/Backup/ExternalDataBackup.sh #& # Local backup of external data
Ifn $Job2 && If $(IsDayMin $(hm2s 3 0)) && test $(($(date +%s) / 86400 % 3)) = 1 && Job2=1 && sleep 60 && Do sh /Main/Server/Scripts/Backup/ServerDataBackup.sh #& # Big backup of local services data
Ifn $Job2 && If $(IsDayMin $(hm2s 3 0)) && test $(($(date +%s) / 86400 % 3)) = 2 && Job2=1 && sleep 60 && Do sh /Main/Server/Scripts/Backup/CloudBackup.sh #& # Cloud backup of the locally backed-up data
Ifn $Job2 && If $(IsDayMin $(hm2s 3 0)) && test $(($(date +%s) / 86400 % 3)) = 0 && Job2=1 && sleep 60 && Do sh -c "sh /Main/Server/Scripts/Backup/ExternalDataBackup.sh &" # Local backup of external data
Ifn $Job2 && If $(IsDayMin $(hm2s 3 0)) && test $(($(date +%s) / 86400 % 3)) = 1 && Job2=1 && sleep 60 && Do sh -c "sh /Main/Server/Scripts/Backup/ServerDataBackup.sh &" # Big backup of local services data
Ifn $Job2 && If $(IsDayMin $(hm2s 3 0)) && test $(($(date +%s) / 86400 % 3)) = 2 && Job2=1 && sleep 60 && Do sh -c "sh /Main/Server/Scripts/Backup/CloudBackup.sh &" # Cloud backup of the locally backed-up data

# System reboot every X days at 4:30 AM
#If $(IsDayMin $(hm2s 4 30)) && test $(($(date +%s) / 86400 % 2)) = 0 && sleep 60 && reboot # System reboot every 2 days (every even day)
# If $(IsDayMin $(hm2s 4 30)) && sleep 60 && reboot # System reboot every night

# Try to renew SSL certs every 5 days at 4 AM
Ifn $JobCerts && If $(IsDayMin $(hm2s 4 0)) && test $(($(date +%s) / 86400 % 9)) = 0 && JobCerts=1 && sleep 60 && Do lxc-attach Debian2023 sh /Main/Server/Scripts/RenewCerts.sh
Ifn $JobCerts && If $(IsDayMin $(hm2s 4 0)) && test $(($(date +%s) / 86400 % 9)) = 0 && JobCerts=1 && sleep 60 && Do sh /Main/Server/Scripts/RenewCerts.sh

# Status of all jobs is reset at one time of the day, before or after all execute or have executed (in time).
If $(IsDayMin $(hm2s 0 0)) && Do ResetJobs
Server/Root/etc/diycron.zx.mjs (new executable file, 66 lines)
@@ -0,0 +1,66 @@
#!/usr/bin/env zx
let Jobs = {};

const ResetJobs = () => (Jobs = {
	Backup: false,
	Certs: false,
});
ResetJobs();

const Work = (Job, Funct) => {
	if (!Jobs[Job]) {
		Jobs[Job] = true;
		Funct();
	};
};

echo`=====[ diycron started at ${new Date()} ]=====`;

while (true) {

	const T = new Date();
	//T.Y = T.getFullYear()
	T.M = (T.getMonth() +1);
	T.D = T.getDate();
	T.h = T.getHours();
	T.m = T.getMinutes();
	//T.s = T.getSeconds();
	T.is = (question) => {
		let allTrue = true;
		for (let predicate in question) {
			let word = predicate.replace('_', '');
			let oracle = {
				//Y: T.Y,
				M: T.M,
				D: T.D,
				h: T.h,
				m: T.m,
				//s: T.s,
			}[word];
			if (predicate.endsWith('_') && predicate.startsWith('_')) {
				!((oracle % question[predicate]) == 0) && (allTrue = false);
			} else if (predicate.endsWith('_')) {
				!(oracle <= question[predicate]) && (allTrue = false);
			} else if (predicate.startsWith('_')) {
				!(oracle >= question[predicate]) && (allTrue = false);
			} else {
				!(oracle == question[predicate]) && (allTrue = false);
			};
		};
		return allTrue;
	};

	///////////////////////////////////////

	//T.is({ h:'03', m_:'05' })
	// && Work('Backup', ()=>{ $`zx /Main/Server/Scripts/BackupAll.zx.mjs` });

	T.is({ _D_:'9', h:'04', m_:'05' })
		&& Work('Certs', ()=>{ $`sh /Main/Server/Scripts/RenewCerts.sh` });

	///////////////////////////////////////

	(T.h=='00' && T.m=='00') && ResetJobs();
	await sleep(7500);

};
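For reference, the key convention in T.is() above: a bare key (h) must equal the current time field, a trailing underscore (m_) means at most, a leading underscore (_h) means at least, and a key wrapped in underscores (_D_) means the field must be an exact multiple of the value; so T.is({ _D_:'9', h:'04', m_:'05' }) is meant to fire on the 9th, 18th and 27th of the month between 04:00 and 04:05. A standalone sketch of the same matcher (illustrative only, not part of the commit; note it uses replaceAll so that a fully wrapped key like _D_ still resolves to its field name):

// Illustrative re-statement of the T.is() convention above (not from the original file).
const isTime = (T, question) => Object.entries(question).every(([predicate, value]) => {
	const oracle = { M: T.M, D: T.D, h: T.h, m: T.m }[predicate.replaceAll('_', '')];
	if (predicate.startsWith('_') && predicate.endsWith('_')) return (oracle % value) == 0; // multiple of
	if (predicate.endsWith('_')) return oracle <= value;   // at most
	if (predicate.startsWith('_')) return oracle >= value; // at least
	return oracle == value; // exact match (loose, so '04' == 4 holds)
});
// e.g. isTime(T, { _D_: '9', h: '04', m_: '05' })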
@@ -2,7 +2,7 @@ map "" $LanLocalhost {
default 192.168.1.125;
}
server {
listen 81;
listen 82;
#location /admin {
# proxy_http_version 1.1;
# proxy_pass http://10.0.3.106;
@@ -63,4 +63,13 @@ server {
add_header 'Cross-Origin-Opener-Policy' 'same-origin';
add_header 'Cross-Origin-Resource-Policy' 'same-site';
}

#location /pict.chat/ {
# resolver 10.0.3.1 ipv6=off;
# proxy_http_version 1.1;
# proxy_pass https://pict.chat;
# proxy_set_header Host "pict.chat";
# proxy_set_header Upgrade $http_upgrade;
# proxy_set_header Connection "upgrade";
#}
}
@@ -6,6 +6,7 @@ server {
ssl_certificate_key /etc/letsencrypt/live/private-analytics-not-for-public-use.octt.eu.org/privkey.pem;
ssl_prefer_server_ciphers on;
location /sitoctt/ {
resolver 10.0.3.1;
proxy_http_version 1.1;
proxy_pass https://sitoctt.goatcounter.com/;
proxy_set_header Host "sitoctt.goatcounter.com";
@@ -16,6 +17,7 @@ server {
proxy_set_header Connection "upgrade";
}
location /octtkb/ {
resolver 10.0.3.1;
proxy_http_version 1.1;
proxy_pass https://octtkb.goatcounter.com/;
proxy_set_header Host "octtkb.goatcounter.com";
@@ -1,6 +1,6 @@
server {
listen 80;
rewrite ^ https://$host$request_uri? permanent;
#server {
# listen 80;
# rewrite ^ https://$host$request_uri? permanent;
# location / {
# #resolver 127.0.0.1;
# proxy_http_version 1.1;
@@ -11,7 +11,7 @@ server {
# proxy_set_header X-Forwarded-Proto $scheme;
# proxy_set_header Upgrade $http_upgrade;
# }
}
#}

server {
listen 80;
@@ -25,7 +25,33 @@ server {

server {
listen 80;
#listen 81;
listen 81;
error_page 403 = /error.php?code=403;
error_page 404 = /error.php?code=404;
error_page 500 = /error.php?code=500;
access_log /var/log/nginx/root.access.log;
error_log /var/log/nginx/root.error.log;
location / {
root /Main/Server/www/root;
if ($request_uri ~ ^([^.\?]*[^/])$) {
return 301 $1/;
}
try_files $uri $uri/ $uri.html =404;
autoindex off;
location ~ ^.+?\.php(/.*)?$ {
fastcgi_pass php;
fastcgi_split_path_info ^(.+\.php)(/.*)$;
set $path_info $fastcgi_path_info;
fastcgi_param PATH_INFO $path_info;
include fastcgi_params;
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
}
}
}

server {
listen 80;
listen 81;
listen 443 ssl;
server_name hlb0.octt.eu.org;
ssl_certificate /etc/letsencrypt/live/hlb0.octt.eu.org/fullchain.pem;
@@ -4,10 +4,10 @@ After=network.target
[Service]
Type=simple
Restart=always
RestartSec=10
RestartSec=15
#CPUQuota=90%
#MemoryMax=400M
User=root
ExecStart=sh -c "cat /var/log/diycron.log >> /var/log/diycron.log.old; sh /etc/diycron > /var/log/diycron.log"
ExecStart=sh -c "cat /var/log/diycron.log >> /var/log/diycron.log.old; zx /etc/diycron.zx.mjs > /var/log/diycron.log"
[Install]
WantedBy=multi-user.target