125 Commits

Author SHA1 Message Date
ce116e6757 update d-mediawiki submodule 2020-04-27 17:40:15 -07:00
2c22c62339 fix docker-compose template 2020-04-27 17:10:24 -07:00
55b4cb49db fix restore database script commands 2020-04-27 17:10:00 -07:00
249293dbcf fix restore database config file in d-mysql 2020-04-27 16:33:10 -07:00
351ac25722 update d-mysql submodule pointer 2020-04-27 16:04:36 -07:00
daccdcc191 try to fix up restore db script 2020-04-27 16:03:19 -07:00
13a2c455a4 fix up restore db script 2020-04-27 15:29:37 -07:00
8c9f254ef5 fix permissions in restore_wikifiles 2020-04-27 15:22:30 -07:00
25e0d8e8d6 fix set -x and set +x commands 2020-04-27 15:19:56 -07:00
245684aefd remove unused utils-gitea dir 2020-04-27 15:06:52 -07:00
7eb16dbb1c remove backup wikifiles script 2020-04-27 15:06:36 -07:00
68f6c50e4d fix more path issues in backup dump scripts 2020-04-27 15:03:10 -07:00
9f164de798 fix path to backup file (2) 2020-04-27 15:01:35 -07:00
8933444965 fix path to backup file 2020-04-27 15:01:21 -07:00
4914c2819c use git user in gitea dump script 2020-04-27 14:59:03 -07:00
e4879d5f6f remove daily script 2020-04-27 14:54:30 -07:00
65dbd73595 remove monthly script 2020-04-27 14:54:20 -07:00
5882782865 update/add all backup dump scripts 2020-04-27 14:54:11 -07:00
50d5ff98e8 remove unused mysql backup script 2020-04-27 14:08:56 -07:00
562f163154 add new gitea and wikidb dump files 2020-04-27 14:06:58 -07:00
b4da61545d remove unused daily gitea script 2020-04-27 14:06:45 -07:00
dab23e6505 update backup gitea script 2020-04-27 13:27:13 -07:00
255bf10255 remove unused gitea utils 2020-04-27 13:11:15 -07:00
6d83e1eccc update d-gitea .gitignore 2020-04-27 12:33:32 -07:00
ee5902d7c0 fix d-gitea bind-mounting 2020-04-27 11:39:24 -07:00
1ed9a6a231 update d-gitea submodule to latest 2020-04-27 11:39:06 -07:00
84c474b31f Merge branch 'master' of github.com:charlesreid1-docker/pod-charlesreid1
* 'master' of github.com:charlesreid1-docker/pod-charlesreid1:
  add d-gitea submodule
  remove d-gitea submodule
2020-04-26 13:12:14 -07:00
f953bcba57 add d-gitea submodule 2020-04-26 13:09:40 -07:00
0e88276628 remove d-gitea submodule 2020-04-26 13:09:40 -07:00
d4483afc98 fix config file 2020-04-26 13:08:47 -07:00
c1d95506af add d-gitea submodule 2020-04-26 12:45:31 -07:00
d561e9dea8 remove d-gitea submodule 2020-04-26 12:43:43 -07:00
77c7cac926 remove unused scripts 2020-04-25 15:46:38 -07:00
8b4e7e862b update d-nginx-charlesreid1 2020-04-25 15:46:17 -07:00
6a7fcafd9e fix name of pod in gitea restore script 2019-07-26 22:27:07 -07:00
c6d9075ac3 take backup_gitea utility script out of commission until gitea dump is finally working 2019-07-26 22:26:39 -07:00
5f3080cd2b fix path to pod in gitea scrape script 2019-07-26 22:25:46 -07:00
20781b3c6a fix docs typo 2019-07-26 22:24:53 -07:00
64098c806f fix wikifiles backup script - some problematic docker commands 2019-07-25 16:04:53 -07:00
c090346b03 update memo on scripts 2019-07-25 16:00:18 -07:00
88c10dd4eb sleep a little less 2019-07-25 15:42:37 -07:00
68cb4e38d3 fix container name and set x placement in backup_wikifiles script 2019-07-25 15:35:28 -07:00
5cd1790039 update and print messages for backup_* scripts 2019-07-25 15:22:17 -07:00
e735372842 fix logfile names in backup scripts 2019-07-25 15:15:24 -07:00
a12ac7e22f update backup_gitea with a few small fixes 2019-07-25 15:14:57 -07:00
f588287930 fix variable name and container name 2019-07-25 15:13:56 -07:00
3a9a99ba12 fix run_cmd calls in backup scripts 2019-07-25 15:05:01 -07:00
3b712cfc44 fix datestamps in backup scripts 2019-07-25 14:55:59 -07:00
d3a24d844c add and update apply_templates* scripts 2019-07-25 14:55:44 -07:00
e1330531ff improve placement of set x statements in backup gitea script 2019-07-25 14:33:53 -07:00
3dde64de62 update mediawiki submodule 2019-07-25 11:20:47 -07:00
aaecd56bc6 fix the -C flag 2019-07-25 11:17:52 -07:00
5fc6953076 fix container name 2019-07-25 01:13:24 -07:00
e4a34b71e3 update mw skin 2019-07-25 01:13:00 -07:00
a323d65108 remove 2019-07-25 01:02:19 -07:00
7a9cb10e00 script is currently broken 2019-07-25 00:19:42 -07:00
b7cf150768 update restore database script 2019-07-24 23:23:13 -07:00
0291bfcc21 Merge branch 'master' of github.com:charlesreid1-docker/pod-charlesreid1
* 'master' of github.com:charlesreid1-docker/pod-charlesreid1:
  fix spew in daily backup scripts
  add backup_gitea script + service
  remove .j2 extension in service script
  add backup_wikifiles script + service
  fix typo
  add backup mysql script + service
  fix paths to scrape and assemble scripts
  add "user" argument to scrape_gitea_as_sudo script
  update gitea submodule
  temper temper
  Add databot service + script for forever loop
2019-07-24 23:19:53 -07:00
8041cd0458 rearrange set statements 2019-07-24 23:19:32 -07:00
f8814ecfd5 Merge remote-tracking branch 'cmr/spew-fix'
* cmr/spew-fix:
  fix spew in daily backup scripts
2019-07-24 20:46:24 -07:00
e5c145d180 add backup_gitea script + service 2019-07-24 16:20:44 -07:00
be8b77f615 remove .j2 extension in service script 2019-07-24 16:20:27 -07:00
ecfc2657da add backup_wikifiles script + service 2019-07-24 16:04:58 -07:00
5343fc14fb fix typo 2019-07-24 16:03:50 -07:00
8ac8bb3cb0 add backup mysql script + service 2019-07-24 15:57:14 -07:00
b0d5fbf94d fix paths to scrape and assemble scripts 2019-07-24 15:31:10 -07:00
9fdb94aae1 add "user" argument to scrape_gitea_as_sudo script 2019-07-24 13:32:13 -07:00
6504c60f92 update gitea submodule 2019-07-24 13:23:26 -07:00
b03db69305 Merge branch 'master' of github.com:charlesreid1-docker/pod-charlesreid1
* 'master' of github.com:charlesreid1-docker/pod-charlesreid1:
  update gitignore
  update modifications to app.ini in d-gitea
2019-07-24 13:10:38 -07:00
34fe316c28 temper temper 2019-07-24 09:50:18 -07:00
0df1c8c6cd update gitignore 2019-07-23 18:33:11 -07:00
1e90942d54 Merge branch 'master' of github.com:charlesreid1-docker/pod-charlesreid1
* 'master' of github.com:charlesreid1-docker/pod-charlesreid1:
  add forever loop scripts to gitignore file
  add a forever loop in python, and a corresponding service to run it forever
  fix imports in git clone data script
  add clarifying comments
  rename the script back to what it should be
2019-07-23 18:32:06 -07:00
77cb672f2c update modifications to app.ini in d-gitea 2019-07-23 18:31:20 -07:00
0c43293449 Add databot service + script for forever loop 2019-07-23 16:06:49 -07:00
d7e35ce4a7 add forever loop scripts to gitignore file 2019-07-22 19:30:42 -07:00
7bc3e0cfa5 add a forever loop in python, and a corresponding service to run it forever 2019-07-20 13:18:04 -07:00
7d65896461 fix imports in git clone data script 2019-07-20 13:17:29 -07:00
3d650cca74 add clarifying comments 2019-07-19 09:43:58 -07:00
7c2eabc025 rename the script back to what it should be 2019-07-19 09:32:13 -07:00
b44c9923cc update mediawiki submodule 2019-07-18 14:53:06 -07:00
17bf7c8bba update mediawiki submodule to latest 2019-07-18 14:41:04 -07:00
dfb4ab1f81 update fix_* scripts in mediawiki submodule 2019-07-18 13:31:58 -07:00
566ce5f875 fix fix_* scripts in d-mediawiki submodule 2019-07-18 13:14:07 -07:00
05d4718802 fix variable name typo 2019-07-17 19:14:43 -07:00
9afcee8dbd update git clone scripts 2019-07-17 19:08:30 -07:00
986e9bdbca use executor in git_clone_* scripts 2019-07-17 18:58:55 -07:00
4f8cc11d5e update git pull scripts again 2019-07-17 18:48:11 -07:00
a4119d21e9 update git pull scripts again 2019-07-17 18:45:07 -07:00
93631333d7 update git pull scripts 2019-07-17 18:42:26 -07:00
926ed5d5f4 update pull scripts duuumb 2019-07-17 18:36:51 -07:00
fd52e16659 modify print line 2019-07-17 16:24:05 -07:00
4ec7a01e38 update mkdocs-material 2019-07-17 14:52:01 -07:00
695e44efd6 fix print lines 2019-07-17 14:50:20 -07:00
c09e167c99 update/fix git pull data 2019-07-17 14:48:29 -07:00
316a12e9f8 handle stderr when commands fail 2019-07-17 11:44:08 -07:00
9a183685c3 update readme with todos 2019-07-17 11:23:29 -07:00
a8e605640d update git pull data script docstring 2019-07-17 11:22:19 -07:00
908d380fb5 add executioner, convert pull scripts to use executioner 2019-07-17 11:21:02 -07:00
e1017e776d copy executioner function to output in apply_templates script 2019-07-17 11:19:04 -07:00
e829de2399 fix git pull www script 2019-07-17 11:15:52 -07:00
ee454bd922 moving apply templates to account for git and pod 2019-07-16 11:53:05 -07:00
fbea6bb235 another typo fix 2019-07-16 11:50:37 -07:00
f5621154be update mkdocs-material to master 2019-07-16 11:46:52 -07:00
fda574e3d2 fix more typos, apply to charlesreid1.red 2019-07-16 11:44:30 -07:00
09d475efae fix typo 2019-07-16 11:41:55 -07:00
fd1c2b18a0 fix git.htdocs folder name 2019-07-16 11:32:14 -07:00
95cc9dfb20 update apply_templates script for testing 2019-07-16 08:58:04 -07:00
0863b01bb7 cleanup 2019-07-16 08:30:17 -07:00
ed711bee04 Move git shell scripts to attic 2019-07-16 08:29:31 -07:00
9732564bf4 add git pull scripts and apply_template script 2019-07-16 08:22:56 -07:00
e8834a3ac8 improve logic in git clone scripts 2019-07-16 08:22:42 -07:00
a4ca100dff move git clone www and git clone data scripts to templates 2019-07-15 17:45:11 -07:00
92a255ff6e update d-nginx-charlesreid1 submodule 2019-07-14 12:58:43 -07:00
e2bfba4143 fix variable name in d-nginx-charlesreid1 config file 2019-07-14 12:41:02 -07:00
6d8a1021f7 update comment in d-nginx-charlesreid1 2019-07-13 08:26:10 -07:00
645d15f94f update mediawiki submodule 2019-07-12 21:59:44 -07:00
f0bd402e9b update d-nginx-charlesreid1 submodule 2019-07-12 21:56:45 -07:00
490187bc4f update nginx templates to hard-code subdomains 2019-07-12 21:35:13 -07:00
fbd72ebf2a add apply templates script 2019-07-12 21:29:39 -07:00
77c313141d clean up docker-compose template 2019-07-12 21:29:24 -07:00
06308ad8ca replace charlesreid1.com with server_name_default 2019-07-12 16:34:35 -07:00
ab168da9ce tidy up gitea app.ini 2019-07-12 10:46:49 -07:00
1127114a72 add comment+link 2019-07-12 09:30:14 -07:00
fba385f098 fix submodule import typo 2019-07-12 00:21:54 -07:00
f3c6b55b09 change gh org for mkdocs-material 2019-07-12 00:15:04 -07:00
49 changed files with 1175 additions and 682 deletions

12
.gitignore vendored
View File

@@ -2,3 +2,15 @@ site
root.password root.password
docker-compose.yml docker-compose.yml
*.zip *.zip
scripts/output
scripts/forever_loop.py.j2
scripts/forever_loop.service.j2
scripts/executioner.pyc
scripts/git_clone_www.py
scripts/git_pull_www.py
scripts/git_clone_data.py
scripts/git_pull_data.py

8
.gitmodules vendored
View File

@@ -10,12 +10,12 @@
[submodule "d-nginx-charlesreid1"] [submodule "d-nginx-charlesreid1"]
path = d-nginx-charlesreid1 path = d-nginx-charlesreid1
url = git@github.com:charlesreid1-docker/d-nginx-charlesreid1.git url = git@github.com:charlesreid1-docker/d-nginx-charlesreid1.git
[submodule "d-gitea"]
path = d-gitea
url = git@github.com:charlesreid1-docker/d-gitea.git
[submodule "d-python-files"] [submodule "d-python-files"]
path = d-python-files path = d-python-files
url = git@github.com:charlesreid1-docker/d-python-files.git url = git@github.com:charlesreid1-docker/d-python-files.git
[submodule "mkdocs-material"] [submodule "mkdocs-material"]
path = mkdocs-material path = mkdocs-material
url = git@github.com:charlesreid1/mkdocs-material.git url = git@github.com:charlesreid1-docker/mkdocs-material.git
[submodule "d-gitea"]
path = d-gitea
url = git@github.com:charlesreid1-docker/d-gitea.git

42
apply_templates.py Normal file
View File

@@ -0,0 +1,42 @@
import os, re, sys
from jinja2 import Environment, FileSystemLoader, select_autoescape

"""
Apply Default Values to Jinja Templates

This script applies default values to
docker-compose.yml file.

The template is useful for Ansible,
but this is useful for experiments/one-offs.
"""

# Where templates live
TEMPLATEDIR = '.'

# Where rendered templates will go
OUTDIR = '.'

# Should existing files be overwritten
OVERWRITE = False

# Template engine rooted at the current directory.
jinja_env = Environment(loader=FileSystemLoader('.'))

template_name = 'docker-compose.yml.j2'
rendered_name = 'docker-compose.yml'

# Default values for the template's variables.
defaults = {
    "server_name_default" : "charlesreid1.com",
    "mediawiki_secretkey" : "asdfqwerty_oiuqoweiruoasdfi",
    "mysql_password" : "MySuperSecretPassword"
}
rendered_text = jinja_env.get_template(template_name).render(defaults)

# Refuse to clobber an existing rendered file unless OVERWRITE is set.
if os.path.exists(rendered_name) and not OVERWRITE:
    raise Exception("Error: file %s already exists!"%(rendered_name))
else:
    with open(rendered_name,'w') as f:
        f.write(rendered_text)

Submodule d-gitea updated: 0eff502763...983cd1bf18

Submodule d-mysql updated: 4bd88e74c1...20e811f8de

View File

@@ -1,6 +1,9 @@
version: "3.3" version: "3.3"
services: services:
# Note: depends_on is from here
# https://stackoverflow.com/a/39039830
stormy_gitea: stormy_gitea:
image: gitea/gitea:latest image: gitea/gitea:latest
environment: environment:
@@ -9,9 +12,8 @@ services:
restart: always restart: always
volumes: volumes:
- "stormy_gitea_data:/data" - "stormy_gitea_data:/data"
- "./d-gitea/custom/conf:/data/gitea/conf" - "./d-gitea/custom:/data/gitea"
- "./d-gitea/custom/public:/data/gitea/public" - "./d-gitea/data:/app/gitea/data"
- "./d-gitea/custom/templates:/data/gitea/templates"
logging: logging:
driver: "json-file" driver: "json-file"
options: options:
@@ -42,48 +44,53 @@ services:
max-size: 1m max-size: 1m
max-file: "10" max-file: "10"
environment: environment:
- MEDIAWIKI_SITE_SERVER=https://charlesreid1.com - MEDIAWIKI_SITE_SERVER=https://{{ server_name_default }}
- MEDIAWIKI_SECRETKEY={{ mediawiki_secretkey }} - MEDIAWIKI_SECRETKEY={{ mediawiki_secretkey }}
- MYSQL_HOST=mysql - MYSQL_HOST=stormy_mysql
- MYSQL_DATABASE=wikidb - MYSQL_DATABASE=wikidb
- MYSQL_USER=root - MYSQL_USER=root
- MYSQL_PASSWORD={{ mysql_password }} - MYSQL_PASSWORD={{ mysql_password }}
depends_on:
- stormy_mysql
stormy_nginx: stormy_nginx:
restart: always restart: always
image: nginx image: nginx
hostname: charlesreid1.com hostname: {{ server_name_default }}
command: /bin/bash -c "nginx -g 'daemon off;'" command: /bin/bash -c "nginx -g 'daemon off;'"
volumes: volumes:
- "./d-nginx-charlesreid1/conf.d:/etc/nginx/conf.d:ro" - "./d-nginx-charlesreid1/conf.d:/etc/nginx/conf.d:ro"
- "/etc/localtime:/etc/localtime:ro" - "/etc/localtime:/etc/localtime:ro"
- "/etc/letsencrypt:/etc/letsencrypt" - "/etc/letsencrypt:/etc/letsencrypt"
- "/www/charlesreid1.blue/htdocs:/www/charlesreid1.blue/htdocs:ro" - "/www/{{ server_name_default }}/htdocs:/www/{{ server_name_default }}/htdocs:ro"
- "/www/charlesreid1.red/htdocs:/www/charlesreid1.red/htdocs:ro"
- "/www/charlesreid1.com/htdocs:/www/charlesreid1.com/htdocs:ro"
logging: logging:
driver: "json-file" driver: "json-file"
options: options:
max-size: 1m max-size: 1m
max-file: "10" max-file: "10"
depends_on:
- stormy_mw
- stormy_gitea
#- stormy_files
#- stormy_myadmin
ports: ports:
- "80:80" - "80:80"
- "443:443" - "443:443"
### stormy_files:
### image: python:3.6
### command: bash -c "cd /files && python3 -m http.server 8081"
### volumes:
### - "/www/files.{{ server_name_default }}:/files"
### logging:
### driver: "json-file"
### stormy_myadmin: ### stormy_myadmin:
### image: "phpmyadmin/phpmyadmin" ### image: "phpmyadmin/phpmyadmin"
### links: ### links:
### - stormy_mysql:db ### - stormy_mysql:db
### environment: ### environment:
### - PMA_ABSOLUTE_URI=charlesreid1.com/phpMyAdmin ### - PMA_ABSOLUTE_URI={{ server_name_default }}/phpMyAdmin
### stormy_files:
### image: python:3.6
### command: bash -c "cd /files && python3 -m http.server 8081"
### volumes:
### - "/www/files:/files"
### logging:
### driver: "json-file"
volumes: volumes:
stormy_mysql_data: stormy_mysql_data:

View File

@@ -3,6 +3,16 @@
Contains useful scripts for setting up and maintaining Contains useful scripts for setting up and maintaining
the charlesreid1.com docker pod. the charlesreid1.com docker pod.
## TODO
Update:
- jinja templates
- apply template scripts
- executioner
- forever tasks and forever loops
## `dockerpod-charlesreid1.service` ## `dockerpod-charlesreid1.service`
This .service script is a systemd startup script that This .service script is a systemd startup script that
@@ -34,17 +44,3 @@ This script pulls the latest changes from the
`gh-pages` branch in the `/www/` folder cloned `gh-pages` branch in the `/www/` folder cloned
with the `git_clone_www.sh` script. with the `git_clone_www.sh` script.
## `git_clone_data.sh`
This clones the data repository (under version control
at <https://git.charlesreid1.com/data/charlesreid1>)
into the `/www` folder cloned with the `git_clone_www.sh`
script.
## `git_pull_data.sh`
This script pulls the latest changes to the
charlesreid1-data repository and updates the
`data/` folder in the `/www/charlesreid1.com/htdocs`
folder.

View File

@@ -0,0 +1,88 @@
import os, re, sys
import glob
import subprocess
from jinja2 import Environment, FileSystemLoader, select_autoescape

"""
Apply Default Values to Jinja Templates

This script applies default values to
templates in this folder.

The templates are used by Ansible,
but this script uses the same template
engine as Ansible to apply template
variable values to the template files
and make real files.

only variables are:
- `username` - user/group name to change ownership to
- `pod_install_dir` - installation directory of pod
"""

# Where templates live
TEMPLATEDIR = '.'

# Where rendered templates will go
OUTDIR = 'output'

# Should existing (destination) files
# be overwritten if they exist?
OVERWRITE = True

# Template variables
TV = {
    'pod_install_dir': '/home/charles/pod-charlesreid1',
    'username': 'charles'
}

def apply_templates(template_dir, output_dir, template_vars, overwrite=False):
    """Apply the template variables
    to the template files.

    template_dir: directory containing the backup_* templates
    output_dir: directory that receives the rendered files
    template_vars: dict mapping template variable name -> value
    overwrite: if False, refuse to clobber an existing rendered file

    Raises Exception if either directory is missing or a destination
    file already exists while overwrite is False.
    """
    if not os.path.exists(output_dir):
        msg = "Error: output dir %s does not exist!"%(output_dir)
        raise Exception(msg)
    if not os.path.exists(template_dir):
        # BUG FIX: this message previously interpolated output_dir,
        # so the error reported the wrong directory.
        msg = "Error: template dir %s does not exist!"%(template_dir)
        raise Exception(msg)

    # Jinja env
    # NOTE(review): the loader and the glob below use the current
    # working directory rather than template_dir — confirm this script
    # is always run from inside the template folder.
    env = Environment(loader=FileSystemLoader('.'))

    # Render templates
    template_files = glob.glob('backup_*')
    # raw string so Python does not interpret the backslash escape
    render_files = [re.sub(r'\.j2','',s) for s in template_files]

    for rfile,tfile in zip(render_files,template_files):
        # Get rendered template content
        content = env.get_template(tfile).render(**template_vars)
        # Write to file
        dest = os.path.join(output_dir,rfile)
        if os.path.exists(dest) and overwrite is False:
            msg = "Error: template rendering destination %s already exists!"%(dest)
            raise Exception(msg)
        with open(dest,'w') as f:
            f.write(content)

    # Ship the helper module alongside the rendered backup scripts,
    # since they do `from executioner import execute`.
    x = 'executioner.py'
    subprocess.call(['cp',x,os.path.join(output_dir,x)])

    print("Rendered the following templates:%s\nOutput files:%s\n"%(
        "".join(["\n- "+os.path.join(template_dir,j) for j in template_files]),
        "".join(["\n- "+os.path.join(output_dir,j) for j in render_files])
    ))

if __name__=="__main__":
    apply_templates(TEMPLATEDIR,OUTDIR,TV,OVERWRITE)

View File

@@ -0,0 +1,89 @@
import os, re, sys
import glob
import subprocess
from jinja2 import Environment, FileSystemLoader, select_autoescape

"""
Apply Default Values to Jinja Templates

This script applies default values to
templates in this folder.

The templates are used by Ansible,
but this script uses the same template
engine as Ansible to apply template
variable values to the template files
and make real files.

only variables are:
- `username` - user/group name to change ownership to
- `server_name_default` - name of server
  (e.g., charlesreid1.com or charlesreid1.red)
"""

# Where templates live
TEMPLATEDIR = '.'

# Where rendered templates will go
OUTDIR = 'output'

# Should existing (destination) files
# be overwritten if they exist?
OVERWRITE = True

# Template variables
TV = {
    'server_name_default': 'charlesreid1.red',
    'username': 'charles'
}

def apply_templates(template_dir, output_dir, template_vars, overwrite=False):
    """Apply the template variables
    to the template files.

    template_dir: directory containing the git_*.py.j2 templates
    output_dir: directory that receives the rendered files
    template_vars: dict mapping template variable name -> value
    overwrite: if False, refuse to clobber an existing rendered file

    Raises Exception if either directory is missing or a destination
    file already exists while overwrite is False.
    """
    if not os.path.exists(output_dir):
        msg = "Error: output dir %s does not exist!"%(output_dir)
        raise Exception(msg)
    if not os.path.exists(template_dir):
        # BUG FIX: this message previously interpolated output_dir,
        # so the error reported the wrong directory.
        msg = "Error: template dir %s does not exist!"%(template_dir)
        raise Exception(msg)

    # Jinja env
    # NOTE(review): the loader and the glob below use the current
    # working directory rather than template_dir — confirm this script
    # is always run from inside the template folder.
    env = Environment(loader=FileSystemLoader('.'))

    # Render templates
    template_files = glob.glob('git_*.py.j2')
    # raw string so Python does not interpret the backslash escape
    render_files = [re.sub(r'\.j2','',s) for s in template_files]

    for rfile,tfile in zip(render_files,template_files):
        # Get rendered template content
        content = env.get_template(tfile).render(**template_vars)
        # Write to file
        dest = os.path.join(output_dir,rfile)
        if os.path.exists(dest) and overwrite is False:
            msg = "Error: template rendering destination %s already exists!"%(dest)
            raise Exception(msg)
        with open(dest,'w') as f:
            f.write(content)

    # Ship the helper module alongside the rendered scripts,
    # since they do `from executioner import execute`.
    x = 'executioner.py'
    subprocess.call(['cp',x,os.path.join(output_dir,x)])

    print("Rendered the following templates:%s\nOutput files:%s\n"%(
        "".join(["\n- "+os.path.join(template_dir,j) for j in template_files]),
        "".join(["\n- "+os.path.join(output_dir,j) for j in render_files])
    ))

if __name__=="__main__":
    apply_templates(TEMPLATEDIR,OUTDIR,TV,OVERWRITE)

View File

@@ -0,0 +1,82 @@
import os, re, sys
import glob
from jinja2 import Environment, FileSystemLoader, select_autoescape

"""
Apply Default Values to Jinja Templates
for pod-charlesreid1 startup service

This script applies default values to
templates in this folder.

The templates are used by Ansible,
but this script uses the same template
engine as Ansible to apply template
variable values to the template files
and make real files.

only variables are:
- `pod_install_dir` - location where pod-charlesreid1 repo is
"""

# Where templates live
TEMPLATEDIR = '.'

# Where rendered templates will go
OUTDIR = 'output'

# Should existing (destination) files
# be overwritten if they exist?
OVERWRITE = True

# Template variables
TV = { 'pod_install_dir' : '/home/charles/pod-charlesreid1' }

def apply_templates(template_dir, output_dir, template_vars, overwrite=False):
    """Apply the template variables
    to the template files.

    template_dir: directory containing the dockerpod*/databot* templates
    output_dir: directory that receives the rendered files
    template_vars: dict mapping template variable name -> value
    overwrite: if False, refuse to clobber an existing rendered file

    Raises Exception if either directory is missing or a destination
    file already exists while overwrite is False.
    """
    if not os.path.exists(output_dir):
        msg = "Error: output dir %s does not exist!"%(output_dir)
        raise Exception(msg)
    if not os.path.exists(template_dir):
        # BUG FIX: this message previously interpolated output_dir,
        # so the error reported the wrong directory.
        msg = "Error: template dir %s does not exist!"%(template_dir)
        raise Exception(msg)

    # Jinja env
    # NOTE(review): the loader and the globs below use the current
    # working directory rather than template_dir — confirm this script
    # is always run from inside the template folder.
    env = Environment(loader=FileSystemLoader('.'))

    # Render templates
    template_files = glob.glob('dockerpod*') \
                   + glob.glob('databot*')
    # raw string so Python does not interpret the backslash escape
    render_files = [re.sub(r'\.j2','',s) for s in template_files]

    for rfile,tfile in zip(render_files,template_files):
        # Get rendered template content
        content = env.get_template(tfile).render(**template_vars)
        # Write to file
        dest = os.path.join(output_dir,rfile)
        if os.path.exists(dest) and overwrite is False:
            msg = "Error: template rendering destination %s already exists!"%(dest)
            raise Exception(msg)
        with open(dest,'w') as f:
            f.write(content)

    print("Rendered the following templates:%s\nOutput files:%s\n"%(
        "".join(["\n- "+os.path.join(template_dir,j) for j in template_files]),
        "".join(["\n- "+os.path.join(output_dir,j) for j in render_files])
    ))

if __name__=="__main__":
    apply_templates(TEMPLATEDIR,OUTDIR,TV,OVERWRITE)

21
scripts/attic/Readme.md Normal file
View File

@@ -0,0 +1,21 @@
# the scripts attic
These four scripts run git clone or
git pull commands that are useful because
they use a non-standard .git/clone
directory layout.
However, they do nothing to ensure
that the relevant folders exist with
the correct permissions, and in some
cases these scripts were fed incorrect
info, so it is better to use the Python
templates in the parent directory.
```
git_clone_data.sh
git_clone_www.sh
git_pull_data.sh
git_pull_www.sh
```

View File

@@ -4,17 +4,7 @@
# master branch to the /www/${DOMAIN} # master branch to the /www/${DOMAIN}
# directory structure # directory structure
if [[ "$#" -eq "0" ]]; then DOMAIN="{{ server_name_default }}"
# default value
DOMAIN="charlesreid1.com"
elif [[ "$#" -eq "1" ]]; then
# user-provided value
DOMAIN="$1"
else
# huh?
echo "git_clone_data.sh takes 0 or 1 input arguments, you provided $#"
exit 1;
fi
REPOURL="https://git.charlesreid1.com/data/charlesreid1-data.git" REPOURL="https://git.charlesreid1.com/data/charlesreid1-data.git"

View File

@@ -28,7 +28,7 @@ mkdir -p /www/${DOMAIN}
# /www/<domain>/htdocs does not exist # /www/<domain>/htdocs does not exist
if [ ! -d "/www/${DOMAIN}/htdocs" ]; then if [ ! -d "/www/${DOMAIN}/htdocs" ]; then
echo "Cloning repo for ${DOMAIN} to /wwww" echo "Cloning repo for ${DOMAIN} to /www"
git -C /www/${DOMAIN} \ git -C /www/${DOMAIN} \
clone \ clone \

View File

@@ -0,0 +1,75 @@
#!/usr/bin/env python3
import subprocess
import os, sys
import time
from datetime import datetime
from executioner import execute

"""
Forever Loop of Gitea Backup Task

Run a forever loop to back up Gitea instance.
"""

def task(pod_dir,username):
    """Run one gitea backup cycle: ensure /backups/gitea exists,
    then invoke utils-gitea/backup_gitea.sh, logging to /tmp."""
    work_dir = os.path.join(pod_dir,'scripts')
    # NOTE(review): points at utils-gitea; later commits mention
    # removing that directory — confirm it still exists.
    gitea_dir = os.path.join(pod_dir,'utils-gitea')
    backup_dir = os.path.join('/backups','gitea')

    def run_cmd(f,cmd):
        """This runs a command and writes the output to a log file."""
        f.write("About to run the following command for backup gitea task:\n")
        f.write(" $ " + " ".join(cmd))
        f.write("\n")
        f.write("Command Output:")
        for out_line in execute(cmd):
            f.write(out_line)
        f.write("\n")
        f.write("Done.")

    stamp = datetime.now().strftime('%Y-%m-%d_%H%M')
    logfile = '/tmp/backup_gitea_forever_%s.log'%(stamp)
    print("Running task. Log file: %s"%(logfile))

    with open(logfile,'w') as f:
        py_bin = sys.executable
        # Step 1:
        # Make sure work dir and backup dir exist
        if not os.path.exists(work_dir):
            # Quit if working dir does not exist
            return
        if not os.path.exists(backup_dir):
            # Make backup dir if it does not exist
            subprocess.call(['mkdir','-p',backup_dir])
            subprocess.call(['chown',username+':'+username,backup_dir])
        time.sleep(5)
        # Step 2:
        # Back up gitea to file
        run_cmd(f,[os.path.join(gitea_dir,'backup_gitea.sh'),backup_dir])
        time.sleep(5)

if __name__=="__main__":
    # Run a forever loop
    time.sleep(5)
    while True:
        task('{{ pod_install_dir }}','{{ username }}')
        # Two weeks between backups, minus a small fudge factor.
        fudge = 30
        two_weeks = 2*7*24*60*60 - fudge
        time.sleep(two_weeks)

View File

@@ -0,0 +1,11 @@
[Unit]
Description=do the gitea backup task with a forever loop

[Service]
Restart=always
ExecStart=/usr/bin/python {{ pod_install_dir }}/scripts/backup_gitea.py
# BUG FIX: systemd Exec lines are not run through a shell, so a bare
# pipe ("pgrep ... | xargs kill") is passed to pgrep as literal
# arguments. Wrap the pipeline in an explicit shell invocation.
ExecStop=/bin/sh -c '/usr/bin/pgrep -f backup_gitea | /usr/bin/xargs /bin/kill'

[Install]
WantedBy=default.target

View File

@@ -0,0 +1,76 @@
#!/usr/bin/env python3
import subprocess
import os, sys
import time
from datetime import datetime
from executioner import execute
"""
Forever Loop of MySQL Backup Task
Run a forever loop to back up MySQL databases.
"""
def task(pod_dir,username):
work_dir = os.path.join(pod_dir,'scripts')
mysql_dir = os.path.join(pod_dir,'utils-mysql')
backup_dir = os.path.join('/backups','mysql')
def run_cmd(f,cmd):
"""This runs a command and writes the output to a log file."""
f.write("About to run the following command for backup mysql task:\n")
f.write(" $ " + " ".join(cmd))
f.write("\n")
f.write("Command Output:")
for loo in execute(cmd):
f.write(loo)
f.write("\n")
f.write("Done.")
d = datetime.now().strftime('%Y-%m-%d_%H%M')
logfile = '/tmp/backup_mysql_forever_%s.log'%(d)
print("Running task. Log file: %s"%(logfile))
with open(logfile,'w') as f:
py_bin = sys.executable
# Step 1:
# Make sure work dir and backup dir exist
if not os.path.exists(work_dir):
# Quit if working dir does not exist
return
if not os.path.exists(backup_dir):
# Make backup dir if it does not exist
subprocess.call(['mkdir','-p',backup_dir])
subprocess.call(['chown',username+':'+username,backup_dir])
time.sleep(5)
# Step 2:
# Back up SQL database to file
backup_script = os.path.join(mysql_dir,'dump_database.sh')
sql_file = 'wikidb_%s.sql'%(d)
backup_target = os.path.join(backup_dir,sql_file)
backup_cmd = [backup_script,backup_target]
run_cmd(f,backup_cmd)
time.sleep(5)
if __name__=="__main__":
# Run a forever loop
time.sleep(10)
while True:
task('{{ pod_install_dir }}','{{ username }}')
fudge = 30
one_week = 7*24*60*60 - fudge
time.sleep(one_week)

View File

@@ -0,0 +1,11 @@
[Unit]
Description=do the mysql backup task with a forever loop

[Service]
Restart=always
ExecStart=/usr/bin/python {{ pod_install_dir }}/scripts/backup_mysql.py
# BUG FIX: systemd Exec lines are not run through a shell, so a bare
# pipe ("pgrep ... | xargs kill") is passed to pgrep as literal
# arguments. Wrap the pipeline in an explicit shell invocation.
ExecStop=/bin/sh -c '/usr/bin/pgrep -f backup_mysql | /usr/bin/xargs /bin/kill'

[Install]
WantedBy=default.target

View File

@@ -0,0 +1,76 @@
#!/usr/bin/env python3
import subprocess
import os, sys
import time
from datetime import datetime
from executioner import execute
"""
Forever Loop of MySQL Backup Task
Run a forever loop to back up MySQL databases.
"""
def task(pod_dir,username):
work_dir = os.path.join(pod_dir,'scripts')
mw_dir = os.path.join(pod_dir,'utils-mw')
backup_dir = os.path.join('/backups','mediawiki')
def run_cmd(f,cmd):
"""This runs a command and writes the output to a log file."""
f.write("About to run the following command for backup mysql task:\n")
f.write(" $ " + " ".join(cmd))
f.write("\n")
f.write("Command Output:")
for loo in execute(cmd):
f.write(loo)
f.write("\n")
f.write("Done.")
d = datetime.now().strftime('%Y-%m-%d_%H%M')
logfile = '/tmp/backup_mw_forever_%s.log'%(d)
print("Running task. Log file: %s"%(logfile))
with open(logfile,'w') as f:
py_bin = sys.executable
# Step 1:
# Make sure work dir and backup dir exist
if not os.path.exists(work_dir):
# Quit if working dir does not exist
return
if not os.path.exists(backup_dir):
# Make backup dir if it does not exist
subprocess.call(['mkdir','-p',backup_dir])
subprocess.call(['chown',username+':'+username,backup_dir])
time.sleep(5)
# Step 2:
# Back up wiki files to tar file
backup_script = os.path.join(mw_dir,'backup_wikifiles.sh')
tar_file = 'wikifiles_%s.tar.gz'%(d)
backup_target = os.path.join(backup_dir,tar_file)
backup_cmd = [backup_script,backup_target]
run_cmd(f,backup_cmd)
time.sleep(5)
if __name__=="__main__":
# Run a forever loop
time.sleep(5)
while True:
task('{{ pod_install_dir }}','{{ username }}')
fudge = 30
one_week = 7*24*60*60 - fudge
time.sleep(one_week)

View File

@@ -0,0 +1,11 @@
[Unit]
Description=do the mediawiki files backup task with a forever loop

[Service]
Restart=always
ExecStart=/usr/bin/python {{ pod_install_dir }}/scripts/backup_wikifiles.py
# BUG FIX: systemd Exec lines are not run through a shell, so a bare
# pipe ("pgrep ... | xargs kill") is passed to pgrep as literal
# arguments. Wrap the pipeline in an explicit shell invocation.
ExecStop=/bin/sh -c '/usr/bin/pgrep -f backup_wikifiles | /usr/bin/xargs /bin/kill'

[Install]
WantedBy=default.target

View File

@@ -0,0 +1,74 @@
#!/usr/bin/env python3
import subprocess
import os, sys
import time
from datetime import datetime
from executioner import execute
"""
Forever Loop of Data Bot (Gitea) Task
Run a forever loop to scrape gitea for
commit counts, commit the new commit counts,
and check them all in.
"""
def task(pod_dir):
work_dir = os.path.join(pod_dir,'utils-gitea')
def run_cmd(f,cmd):
"""
This runs a databot command
and writes the output to a log file.
"""
f.write("About to run the following command for databot forever task:\n")
f.write(" $ " + " ".join(cmd))
f.write("\n")
f.write("Command Output:")
for loo in execute(cmd):
f.write(loo)
f.write("\n")
f.write("Done.")
d = datetime.now().strftime('%Y-m-%d')
with open('/tmp/databot_forever_%s.log'%(d),'w') as f:
py_bin = sys.executable
# Step 1:
# scrape gitea as sudo
scrape_script = os.path.join(work_dir,'scrape_gitea_as_sudo.sh')
scrape_cmd = ['sudo',
py_bin,scrape_script]
run_cmd(scrape_cmd)
time.sleep(5)
# Step 2:
# assemble gitea counts as user
assemble_script = os.path.join(work_dir,'assemble_gitea_counts.sh')
assemble_cmd = ['sudo','-H','-u','charles',
py_bin,assemble_script]
run_cmd(assemble_cmd)
time.sleep(5)
# Step 3:
# gitea pull data as user
pull_script = 'git_pull_data.py.j2'
pull_cmd = ['sudo','-H','-u','charles',
py_bin,pull_script]
run_cmd(pull_cmd)
if __name__=="__main__":
# Run a forever loop
time.sleep(5)
while True:
task('{{ pod_install_dir }}')
time.sleep(5)

View File

@@ -0,0 +1,10 @@
[Unit]
Description=do the databot task with a forever loop

[Service]
Restart=always
ExecStart=/usr/bin/python {{ pod_install_dir }}/scripts/databot_forever.py
# BUG FIX: systemd Exec lines are not run through a shell, so a bare
# pipe ("pgrep ... | xargs kill") is passed to pgrep as literal
# arguments. Wrap the pipeline in an explicit shell invocation.
ExecStop=/bin/sh -c '/usr/bin/pgrep -f databot_forever | /usr/bin/xargs /bin/kill'

[Install]
WantedBy=default.target

21
scripts/executioner.py Normal file
View File

@@ -0,0 +1,21 @@
#!/usr/bin/env python3
import subprocess
import os


def execute(cmd):
    """
    Run ``cmd`` as a subprocess and generate its stdout line by line,
    so callers can stream the command's output to print() or a log
    file as it is produced.

    On a non-zero exit status, the captured stderr text is yielded as
    a final item and subprocess.CalledProcessError is raised.

    NOTE(review): stderr is only drained after stdout reaches EOF; a
    command that fills the stderr pipe buffer first could block —
    confirm this is acceptable for the short commands used here.
    """
    proc = subprocess.Popen(
        cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        universal_newlines=True,
    )
    # Stream stdout as it arrives.
    while True:
        line = proc.stdout.readline()
        if line == "":
            break
        yield line
    proc.stdout.close()
    # Collect everything the command wrote to stderr.
    err_text = proc.stderr.read()
    status = proc.wait()
    if status:
        yield err_text
        raise subprocess.CalledProcessError(status, cmd)

View File

@@ -0,0 +1,78 @@
#!/usr/bin/env python3
import subprocess
import os
from executioner import execute

"""
/www Initial Setup

This script sets up the initial /www
directory structure for charlesreid1.com
content. (Or, charlesreid1.XYZ, whatever.)
"""

# Jinja-templated values, filled in when this script is installed.
SERVER_NAME_DEFAULT = '{{ server_name_default }}'
USERNAME = '{{ username }}'

# Set repo urls
### # Use git.charlesreid1.com to clone charlesreid1.com content
### repourl = "https://git.charlesreid1.com/charlesreid1/charlesreid1.com.git"
#
# Use github.com to clone charlesreid1.com content
repourl = "https://github.com/charlesreid1-docker/charlesreid1.com.git"

# Set directory locations
root = '/www'
basedir = os.path.join(root, SERVER_NAME_DEFAULT)
htdocsdir = os.path.join(basedir, 'htdocs')
dotgitdir = os.path.join(basedir, 'git.htdocs')

# Start by making sure the base directory exists
# (stdlib os.makedirs replaces the original shell-out to `mkdir -p`)
os.makedirs(basedir, exist_ok=True)

# Run the clone command, but only if there is no dot git directory yet
# /www/SERVER_NAME_DEFAULT/htdocs
if not os.path.exists(dotgitdir):

    if os.path.exists(htdocsdir):
        # an htdocs dir with no dot git dir?
        # this must be some kind of mistake.
        subprocess.call(['rm', '-fr', htdocsdir])

    # Clone htdocs folder for SERVER_NAME_DEFAULT; --separate-git-dir
    # keeps the .git directory out of the web root.
    clonecmd = ['git', 'clone',
                '--separate-git-dir=%s' % (dotgitdir),
                '-b', 'gh-pages',
                repourl, htdocsdir]

    print("About to clone /www content for %s using command:\n" % (SERVER_NAME_DEFAULT))
    print(" $ " + " ".join(clonecmd))
    print("\n")
    print("Command Output:")
    for loo in execute(clonecmd):
        print(loo)
    print("\n")
    print("Done.")

# Step 2: chown everybody
# (runs whether or not the clone happened)
chowncmd = ['chown', '-R',
            USERNAME + ":" + USERNAME,
            basedir]

print("About to chown /www directory using command:\n")
print(" $ " + " ".join(chowncmd))
print("\n")
print("Command Output:")
for loo in execute(chowncmd):
    print(loo)
print("\n")
print("Done.")

View File

@@ -0,0 +1,67 @@
#!/usr/bin/env python3
import subprocess
import os
from executioner import execute

"""
Pull /www

This script git pulls the /www directory
for updating charlesreid1.com content.
"""

# Jinja-templated values, filled in when this script is installed.
SERVER_NAME_DEFAULT = '{{ server_name_default }}'
USERNAME = '{{ username }}'

# Set directory locations
root = '/www'
basedir = os.path.join(root, SERVER_NAME_DEFAULT)
htdocsdir = os.path.join(basedir, 'htdocs')
dotgitdir = os.path.join(basedir, 'git.htdocs')

# Step 1: git pull
# Run the pull command, but only if
# the htdocs dir already exists
# /www/<domain>/htdocs
if os.path.exists(htdocsdir):

    # Git pull, respecting the non-standard
    # layout of the .git directory
    # (created by init_www with --separate-git-dir).
    pullcmd = ['git',
               '-C', basedir,
               '--git-dir=%s' % (dotgitdir),
               '--work-tree=%s' % (htdocsdir),
               'pull', 'origin', 'gh-pages']

    print("About to pull /www content for %s using command:\n" % (SERVER_NAME_DEFAULT))
    print(" $ " + " ".join(pullcmd))
    print("\n")
    print("Command Output:")
    for loo in execute(pullcmd):
        print(loo)
    print("\n")
    print("Done.")

# Step 2: chown everybody
# (runs whether or not the pull happened, mirroring init_www)
chowncmd = ['chown', '-R',
            USERNAME + ":" + USERNAME,
            basedir]

print("About to chown /www directory using command:\n")
print(" $ " + " ".join(chowncmd))
print("\n")
print("Command Output:")
for loo in execute(chowncmd):
    print(loo)
print("\n")
print("Done.")

12
scripts/memo Normal file
View File

@@ -0,0 +1,12 @@
The way we're writing to output files, we can't stream the output:
each line of output from the command is written as it happens,
but it goes to an open stream, which is not closed (or flushed)
until the command is finished.
what if you just want everything to frikin
work like a unix command line utility?
the logging, the flags, the robustness,
the simplicity, the lightweight footprint

View File

@@ -1,33 +0,0 @@
#!/bin/bash
#
# Just make a daily gitea files backup.
#
# Wrapper around utils-gitea/backup_gitea.sh: builds a dated backup
# directory under /junkinthetrunk and logs the run to ~/.logs.

# Trace every command.
# NOTE(review): set -x output goes to stderr, not to ${log_target},
# so the trace is only visible on the terminal / in cron mail.
set -x

# Date stamp used in both the backup dir and log file names.
stamp="`date +"%Y-%m-%d"`"

# Tool that does the actual backup, and where its output should land.
# For gitea, the backup target is the whole directory (the tool drops
# multiple zip files into it).
backup_tool="${HOME}/codes/docker/pod-charlesreid1/utils-gitea/backup_gitea.sh"
backup_dir="/junkinthetrunk/backups/daily/gitea_${stamp}"
backup_target="${backup_dir}"

# Log file location.
log_dir="${HOME}/.logs/backups/daily"
log_target="${log_dir}/gitea_${stamp}.log"

mkdir -p ${backup_dir}
mkdir -p ${log_dir}

# Truncate any previous log for this date.
cat /dev/null > ${log_target}

# Write a banner plus run parameters into the log.
echo "================================" >> ${log_target}
echo "=== Gitea Files Backup =========" >> ${log_target}
echo "================================" >> ${log_target}
echo "" >> ${log_target}
echo "Backup Utility: ${backup_tool}" >> ${log_target}
echo "Backup Target: ${backup_target}" >> ${log_target}
echo "Log Target: ${log_target}" >> ${log_target}
echo "" >> ${log_target}
echo "Command: ${backup_tool} ${backup_target} >> ${log_target} 2>&1 " >> ${log_target}
echo "" >> ${log_target}

# Run the backup, appending all of its output to the log.
${backup_tool} ${backup_target} >> ${log_target} 2>&1
echo "Done" >> ${log_target}

View File

@@ -1,33 +0,0 @@
#!/bin/bash
#
# Just make a daily MySQL backup.
#
# Wrapper around utils-mysql/dump_database.sh: dumps the wiki database
# to a dated .sql file and logs the run to ~/.logs.

# Date stamp used in the backup and log file names.
stamp=$(date +"%Y-%m-%d")

# Tool that does the actual dump, and where the .sql file should land.
backup_tool="${HOME}/codes/docker/pod-charlesreid1/utils-mysql/dump_database.sh"
backup_dir="/junkinthetrunk/backups/daily/wikidb_${stamp}"
backup_target="${backup_dir}/wikidb_${stamp}.sql"

# Log file location.
log_dir="${HOME}/.logs/backups/daily"
log_target="${log_dir}/wikidb_${stamp}.log"

mkdir -p ${backup_dir}
mkdir -p ${log_dir}

# Truncate any previous log for this date.
cat /dev/null > ${log_target}

# NOTE(review): "tee ${log_target}" (without -a) truncates the log on
# every invocation below, so only the output of the final tee survives.
# These should presumably be "tee -a ${log_target}" to append.
echo "=======================================" | tee ${log_target}
echo "=== MediaWiki Database Backup =========" | tee ${log_target}
echo "=======================================" | tee ${log_target}
echo "" | tee ${log_target}
echo "Backup Utility: ${backup_tool}" | tee ${log_target}
echo "Backup Target: ${backup_target}" | tee ${log_target}
echo "Log Target: ${log_target}" | tee ${log_target}
echo "" | tee ${log_target}

# Trace just the backup command itself.
set -x
${backup_tool} ${backup_target} >> ${log_target} 2>&1
set +x
# NOTE(review): this tee also truncates, wiping the dump output that
# was just appended to the log.
echo "Done" | tee ${log_target}

View File

@@ -1,32 +0,0 @@
#!/bin/bash
#
# Just make a daily MediaWiki files backup.
#
# Wrapper around utils-mw/backup_wikifiles.sh: creates a dated tar.gz
# of the wiki files and logs the run to ~/.logs.

# Date stamp used in the backup and log file names.
stamp="`date +"%Y-%m-%d"`"

# Tool that does the actual backup, and where the tar.gz should land.
backup_tool="${HOME}/codes/docker/pod-charlesreid1/utils-mw/backup_wikifiles.sh"
backup_dir="/junkinthetrunk/backups/daily/wikifiles_${stamp}"
backup_target="${backup_dir}/wikifiles_${stamp}.tar.gz"

# Log file location.
log_dir="${HOME}/.logs/backups/daily"
log_target="${log_dir}/wikifiles_${stamp}.log"

mkdir -p ${backup_dir}
mkdir -p ${log_dir}

# Truncate any previous log for this date.
cat /dev/null > ${log_target}

# NOTE(review): "tee ${log_target}" (without -a) truncates the log on
# every invocation below, so only the output of the final tee survives.
# These should presumably be "tee -a ${log_target}" to append.
echo "====================================" | tee ${log_target}
echo "=== MediaWiki Files Backup =========" | tee ${log_target}
echo "====================================" | tee ${log_target}
echo "" | tee ${log_target}
echo "Backup Utility: ${backup_tool}" | tee ${log_target}
echo "Backup Target: ${backup_target}" | tee ${log_target}
echo "Log Target: ${log_target}" | tee ${log_target}
echo "" | tee ${log_target}

# Trace just the backup command itself.
set -x
${backup_tool} ${backup_target} >> ${log_target} 2>&1
set +x
# NOTE(review): this tee also truncates, wiping the backup output that
# was just appended to the log.
echo "Done" | tee ${log_target}

83
utils-backup/gitea_dump.sh Executable file
View File

@@ -0,0 +1,83 @@
#!/bin/bash
#
# Run the gitea dump command and send the dump file
# to the specified backup directory.
#
# Backup directory:
#     /home/user/backups/gitea

BACKUP_DIR="${HOME}/backups/gitea"
CONTAINER_NAME="pod-charlesreid1_stormy_gitea_1"

# Print usage and exit non-zero.
function usage {
    set +x
    echo ""
    echo "gitea_dump.sh script:"
    echo ""
    echo "Run the gitea dump command inside the gitea docker container,"
    echo "and copy the resulting zip file to the specified directory."
    echo "The resulting gitea dump zip file will be timestamped."
    echo ""
    echo " ./gitea_dump.sh"
    echo ""
    echo "Example:"
    echo ""
    echo " ./gitea_dump.sh"
    echo " (creates ${BACKUP_DIR}/gitea-dump_2020-01-01.zip)"
    echo ""
    exit 1;
}

# Refuse to run as root: the backup files would end up root-owned.
if [ "$(id -u)" == "0" ]; then
    echo ""
    echo ""
    echo "This script should NOT be run as root!"
    echo ""
    echo ""
    exit 1;
fi

if [ "$#" == "0" ]; then

    # Date stamp for the backup file name.
    STAMP="$(date +"%Y-%m-%d")"
    TARGET="gitea-dump_${STAMP}.zip"

    echo ""
    echo "pod-charlesreid1: gitea_dump.sh"
    echo "-------------------------------"
    echo ""
    echo "Backup target: ${BACKUP_DIR}/${TARGET}"
    echo ""

    mkdir -p "${BACKUP_DIR}"

    # If this script is being run from a cron job there is no TTY,
    # so don't use the -i flag with docker exec.
    # (Array form so the command and flags stay separate words.)
    CRON="$( pstree -s $$ | /bin/grep -c cron )"
    if [[ "$CRON" -eq 1 ]]; then
        DOCKERX=(docker exec -t)
    else
        DOCKERX=(docker exec -it)
    fi

    echo "Step 1: Run gitea dump command inside docker machine"
    set -x
    "${DOCKERX[@]}" --user git "${CONTAINER_NAME}" /bin/bash -c 'cd /app/gitea && /app/gitea/gitea dump --file gitea-dump.zip --skip-repository'
    set +x

    echo "Step 2: Copy gitea dump file out of docker machine"
    set -x
    docker cp "${CONTAINER_NAME}:/app/gitea/gitea-dump.zip" "${BACKUP_DIR}/${TARGET}"
    set +x

    echo "Step 3: Clean up gitea dump file"
    set -x
    "${DOCKERX[@]}" "${CONTAINER_NAME}" /bin/bash -c "rm -f /app/gitea/gitea-dump.zip"
    set +x

    echo "Done."
else
    usage
fi

View File

@@ -1,33 +0,0 @@
#!/bin/bash
#
# Just make a monthly MediaWiki files backup.
#
# NOTE(review): the comment above says "MediaWiki" but this wrapper
# actually runs the gitea backup tool (see backup_tool below and the
# "Gitea Files Backup" banner); presumably a copy-paste leftover.

# Trace every command (trace goes to stderr, not to the log file).
set -x

# Date stamp used in both the backup dir and log file names.
stamp="`date +"%Y-%m-%d"`"

# Tool that does the actual backup; for gitea the backup target is the
# whole directory (the tool drops multiple zip files into it).
backup_tool="${HOME}/codes/docker/pod-charlesreid1/utils-gitea/backup_gitea.sh"
backup_dir="/junkinthetrunk/backups/monthly/gitea_${stamp}"
backup_target="${backup_dir}"

# Log file location.
log_dir="${HOME}/.logs/backups/monthly"
log_target="${log_dir}/gitea_${stamp}.log"

mkdir -p ${backup_dir}
mkdir -p ${log_dir}

# Truncate any previous log for this date.
cat /dev/null > ${log_target}

# Write a banner plus run parameters into the log.
echo "================================" >> ${log_target}
echo "=== Gitea Files Backup =========" >> ${log_target}
echo "================================" >> ${log_target}
echo "" >> ${log_target}
echo "Backup Utility: ${backup_tool}" >> ${log_target}
echo "Backup Target: ${backup_target}" >> ${log_target}
echo "Log Target: ${log_target}" >> ${log_target}
echo "" >> ${log_target}
echo "Command: ${backup_tool} ${backup_target} >> ${log_target} 2>&1 " >> ${log_target}
echo "" >> ${log_target}

# Run the backup, appending all of its output to the log.
${backup_tool} ${backup_target} >> ${log_target} 2>&1
echo "Done" >> ${log_target}

73
utils-backup/wikidb_dump.sh Executable file
View File

@@ -0,0 +1,73 @@
#!/bin/bash
#
# Run the mysql dump command to back up wikidb table, and send the
# resulting SQL file to the specified backup directory.
#
# Backup directory:
#     /home/user/backups/mysql

BACKUP_DIR="${HOME}/backups/mysql"
CONTAINER_NAME="pod-charlesreid1_stormy_mysql_1"

# Print usage and exit non-zero.
function usage {
    set +x
    echo ""
    echo "wikidb_dump.sh script:"
    echo ""
    echo "Run the mysql dump command on the wikidb table in the container,"
    echo "and copy the resulting SQL file to the specified directory."
    echo "The resulting mysql dump SQL file will be timestamped."
    echo ""
    echo " ./wikidb_dump.sh"
    echo ""
    echo "Example:"
    echo ""
    echo " ./wikidb_dump.sh"
    echo " (creates ${BACKUP_DIR}/wikidb_2020-01-01.sql)"
    echo ""
    exit 1;
}

# Refuse to run as root: the backup files would end up root-owned.
if [ "$(id -u)" == "0" ]; then
    echo ""
    echo ""
    echo "This script should NOT be run as root!"
    echo ""
    echo ""
    exit 1;
fi

if [ "$#" == "0" ]; then

    # Date stamp for the backup file name.
    STAMP="$(date +"%Y-%m-%d")"
    TARGET="wikidb_${STAMP}.sql"

    echo ""
    echo "pod-charlesreid1: wikidb_dump.sh"
    echo "--------------------------------"
    echo ""
    echo "Backup target: ${BACKUP_DIR}/${TARGET}"
    echo ""

    mkdir -p "${BACKUP_DIR}"

    echo "Running mysqldump"
    # BUG FIX: the dump output is captured to a file, so docker exec must
    # NOT allocate a TTY (-t): a TTY translates LF into CRLF and corrupts
    # the captured SQL. Plain "docker exec" works from both a terminal
    # and cron, so no cron/TTY detection is needed for this command.
    set -x
    docker exec "${CONTAINER_NAME}" sh -c 'exec mysqldump wikidb --databases -uroot -p"$MYSQL_ROOT_PASSWORD"' > "${BACKUP_DIR}/${TARGET}"
    set +x

    echo "Done."
else
    usage
fi

85
utils-backup/wikifiles_dump.sh Executable file
View File

@@ -0,0 +1,85 @@
#!/bin/bash
#
# Create a tar file containing wiki files
# from the mediawiki docker container.
#
# Backup directory:
#     /home/user/backups/mediawiki

BACKUP_DIR="${HOME}/backups/mediawiki"
CONTAINER_NAME="pod-charlesreid1_stormy_mw_1"
STAMP="$(date +"%Y-%m-%d")"

# Print usage and exit non-zero.
function usage {
    set +x
    echo ""
    echo "wikifiles_dump.sh script:"
    echo ""
    echo "Create a tar file containing wiki files"
    echo "from the mediawiki docker container."
    echo "The resulting tar file will be timestamped."
    echo ""
    echo " ./wikifiles_dump.sh"
    echo ""
    echo "Example:"
    echo ""
    echo " ./wikifiles_dump.sh"
    echo " (creates ${BACKUP_DIR}/wikifiles_2020-01-01.tar.gz)"
    echo ""
    exit 1;
}

# Refuse to run as root: the backup files would end up root-owned.
if [ "$(id -u)" == "0" ]; then
    echo ""
    echo ""
    echo "This script should NOT be run as root!"
    echo ""
    echo ""
    exit 1;
fi

if [ "$#" == "0" ]; then

    TARGET="wikifiles_${STAMP}.tar.gz"

    echo ""
    echo "pod-charlesreid1: wikifiles_dump.sh"
    echo "-----------------------------------"
    echo ""
    echo "Backup target: ${BACKUP_DIR}/${TARGET}"
    echo ""

    mkdir -p "${BACKUP_DIR}"

    # If this script is being run from a cron job there is no TTY,
    # so don't use the -i flag with docker exec.
    CRON="$( pstree -s $$ | /bin/grep -c cron )"
    if [[ "$CRON" -eq 1 ]]; then
        DOCKERX=(docker exec -t)
    else
        DOCKERX=(docker exec -it)
    fi

    echo "Step 1: Compress wiki files inside container"
    set -x
    "${DOCKERX[@]}" "${CONTAINER_NAME}" /bin/tar czf "/tmp/${TARGET}" /var/www/html/images
    set +x

    echo "Step 2: Copy tar.gz file out of container"
    # BUG FIX: removed the stray `mkdir -p $(dirname "$1")` -- this branch
    # only runs with zero arguments, so $1 was always empty; the backup
    # directory is already created above.
    set -x
    docker cp "${CONTAINER_NAME}:/tmp/${TARGET}" "${BACKUP_DIR}/${TARGET}"
    set +x

    echo "Step 3: Clean up tar.gz file"
    set -x
    "${DOCKERX[@]}" "${CONTAINER_NAME}" /bin/rm -f "/tmp/${TARGET}"
    set +x

    echo "Done."
else
    usage
fi

View File

@@ -1,66 +0,0 @@
# Dump Gitea Backup
Running the dump command creates two zip files.
The first zip file is created by gitea via `gitea dump`.
The second zip file is a directory in gitea containing user avatars
(not backed up using the above `gitea dump` command).
### The gitea dump command
When you run `gitea dump`, gitea will create a single zip file archive
of the entire contents of the gitea site, in the current directory
(where the `gitea dump` command was run from).
### The gitea dump directory structure
The built-in `gitea dump` functionality will create a zip
that contains the following directory structure:
```
gitea-repo.zip
gitea-db.sql
custom/
log/
```
When the `gitea-repo.zip` folder is unzipped, it generates a `repositories/` folder
containing the contents of every git repo in the gitea site.
In a real gitea server, here is where these should go:
The `repositories/` dir should be at:
```
<gitea-base-dir>/repositories
```
The `custom/` dir should be at:
```
<gitea-base-dir>/bin/custom
```
The database file should be at:
```
<gitea-base-dir>/data/gitea-db.sql
```
The log should be at:
```
<gitea-base-dir>/log
```
If you are running gitea using docker,
`<gitea-base-dir>` will be `/data/gitea/`.
### The avatars directory
Not much to it, just create a zip file from the
`avatars/` directory and move that zip file
out of the container.

View File

@@ -1,34 +0,0 @@
# Quick Start
We provide a backup and restore script.
The backup script takes a directory as an argument,
and places two backup zip files at the specified location:
```
./backup_gitea.sh <target-dir>
```
Example:
```
$ ./backup_gitea.sh /path/to/backup/target/
$ ls /path/to/backup/target/
gitea-dump-000000.zip
gitea-avatars.zip
```
The restore script will take two zip files as inputs,
the dump zip and the avatars zip:
```
./restore_gitea.sh <gitea-dump-zip> <gitea-avatars-zip>
```
Example using some bash completion magic:
```
$ ./restore_gitea.sh /path/to/backup/target/gitea-{dump-00000,avatars}.zip
```

View File

@@ -1,17 +0,0 @@
# Gitea Dump/Restore Scripts
Fortunately, gitea provides a `gitea dump` command to create a backup.
Unfortunately, gitea does not provide a `gitea restore` command to restore from a backup.
## Gitea Dump Command
See [GiteaDumpCommand.md](GiteaDumpCommand.md)
## Dump Gitea Backup
See [DumpGiteaBackup.md](DumpGiteaBackup.md)
## Restore Gitea Backup
See [RestoreGiteaBackup.md](RestoreGiteaBackup.md)

View File

@@ -1,11 +0,0 @@
# Restore Gitea Backup
The restore script takes two separate arguments.
The first is the zip file created from the `gitea dump` command above.
The second is the zip file containing user avatars.
Not much more to it than that.

View File

@@ -1,38 +0,0 @@
#!/bin/bash
#
# This stupid script is needed because
# ssh and and sudo don't play nice together.
#
# the sudo cron script already extracts
# stats from the repo, which lives in a docker
# volume and hence requires sudo to access.
# also fixes ownership to charles:charles.
#
# now we use non sudo to check new data in.
#
# Summary: clone the charlesreid1-data repo as a regular user, copy in
# the commit_counts.csv produced by the sudo scrape step, commit, push.

# Must run as a regular user (the user's ssh keys/agent are needed
# for the git push, and they don't survive sudo).
if [ "$(id -u)" == "0" ]; then
echo ""
echo ""
echo "This script should be run as a regular user."
echo ""
echo ""
exit 1;
fi

# Working locations shared with the sudo scrape script.
# NOTE(review): WORKDIR is defined but unused in this script.
WORKDIR="/tmp/gitea-temp"
DATDIR="/tmp/gitea-temp/charlesreid1-data"
CLONEDIR="/tmp/gitea-temp/charlesreid1-data2"

# Fresh clone of the data repo (port 222 is the gitea ssh port).
rm -rf ${CLONEDIR}
git clone ssh://git@gitdatabot:222/data/charlesreid1-data.git ${CLONEDIR}

# Copy in the scraped counts, commit, and push; done in a subshell so
# the caller's working directory is unchanged.
(
cp ${DATDIR}/commit_counts.csv ${CLONEDIR}/.
cd ${CLONEDIR}
git config user.name "databot"
git config user.email "databot@charlesreid1.com"
git add commit_counts.csv
git commit commit_counts.csv -m '[scrape_gitea_as_sudo.sh] updating gitea commit count data'
git push origin master
)

View File

@@ -1,86 +0,0 @@
#!/bin/bash
#
# Back up the gitea site: run `gitea dump` inside the gitea container,
# zip up the avatars, and copy both to the target directory given as $1.

set -x

# Print usage and exit non-zero.
function usage {
set +x
echo ""
echo "backup_gitea.sh script:"
echo "Run a gitea dump from the gitea container,"
echo "and back up the gitea avatars."
echo "Gitea backups are dumped to gitea-dump-*.zip"
echo "and gitea-avatars.zip and copied to the target directory."
echo ""
echo " ./backup_gitea.sh <target-dir>"
echo ""
echo "Example:"
echo ""
echo " ./backup_gitea.sh /path/to/backups/"
echo ""
echo "creates the files:"
# NOTE(review): missing space after echo below ("echo\"\"") -- this
# tries to run a command named 'echo""'.
echo""
echo " /path/to/backups/gitea-dump-*.zip"
echo " /path/to/backups/gitea-avatars.zip"
echo ""
echo ""
exit 1;
}

if [[ "$#" -gt 0 ]];
then
echo ""
echo "Backup Gitea:"
echo "----------------"
echo ""
NAME="podcharlesreid1_stormy_gitea_1"
# If this script is being run from a cron job,
# don't use -i flag with docker
CRON="$( pstree -s $$ | /bin/grep -c cron )"
DOCKER=""
if [[ "$CRON" -eq 1 ]];
then
DOCKER="docker exec -t"
else
DOCKER="docker exec -it"
fi
echo "Step 1: Creating backup target"
${DOCKER} $NAME /bin/bash -c 'mkdir /backup'
echo "Step 2: Creating backup zip files:"
echo " Step 2A: gitea dump zip"
${DOCKER} $NAME /bin/bash -c '/app/gitea/gitea dump'
echo " Step 2B: gitea avatars zip"
${DOCKER} $NAME /bin/bash -c 'cd /data/gitea/ && tar czf /backup/gitea-avatars.tar.gz avatars'
echo "Step 3: Moving gitea dump to /backup directory"
${DOCKER} $NAME /bin/bash -c 'mv /tmp/gitea-dump-*/* /backup/.'
TEMP_BACKUP=`mktemp -d 2>/dev/null || mktemp -d -t 'mytmpdir'`
echo "Step 4: Copying backup directory (with zip files) to backup location $1"
echo " Step 4A: Making temporary backup location"
#mkdir -p $TEMP_BACKUP
echo " Step 4B: Copying /backup directory to temporary backup location $1"
# NOTE(review): docker cp does not expand wildcards in the container
# path; "$NAME:/backup/*" is passed literally and this copy fails.
# Copy the directory ("docker cp $NAME:/backup $1") instead.
docker cp $NAME:/backup/* $1/.
# NOTE(review): $V is never defined, so TAR_PREFIX is empty and the
# next line creates a hidden file named ".tar".
TAR_PREFIX="$(echo $V | sed 's+/$++g')"
tar -cvf ${TAR_PREFIX}.tar $1
# NOTE(review): this deletes the caller-supplied target directory --
# destructive, and combined with the empty TAR_PREFIX above the backup
# contents can be lost entirely. Confirm intent before reuse.
rm -fr $1
echo "Step 6: Cleaning up container"
${DOCKER} $NAME /bin/bash -c 'rm -rf /backup'
${DOCKER} $NAME /bin/bash -c 'rm -rf /tmp/gitea-dump-*'
echo "Step 7: Cleaning up local host"
#rm -rf $TEMP_BACKUP
echo " ~ ~ ~ ~ PEACE OUT ~ ~ ~ ~"
else
usage
fi

View File

@@ -1,82 +0,0 @@
#!/bin/bash
#
# Restore a gitea site from a `gitea dump` zip file plus an avatars zip,
# by copying both into the gitea container and unpacking them in place.

set -x

# Print usage and exit non-zero.
function usage {
echo ""
echo "restore_gitea.sh script:"
echo "Restore a gitea site from a .zip dump file and a .zip avatars file."
echo ""
echo " ./restore_gitea.sh <dump-zip-file> <avatars-zip-file>"
echo ""
echo "Example:"
echo ""
echo " ./restore_gitea.sh /path/to/gitea-dump.zip /path/to/gitea-avatars.zip"
echo ""
echo ""
exit 1;
}

echo ""
echo "Restore Gitea:"
echo "----------------"
echo ""
# NOTE(review): uses the old "podcharlesreid1_" container name prefix;
# newer scripts in this repo use "pod-charlesreid1_" -- confirm.
NAME="podcharlesreid1_stormy_gitea_1"

if [[ "$#" -eq 2 ]];
then
# docker exec -it requires a TTY; this script cannot run from cron as-is.
EXEC="docker exec -it $NAME"
CP="docker cp"
echo "- Copying files into container"
${EXEC} /bin/bash -c 'mkdir /restore'
${CP} $1 $NAME:/restore/gitea-dump.zip
${CP} $2 $NAME:/restore/gitea-avatars.zip
echo "- Unpacking files inside container"
${EXEC} /bin/bash -c 'unzip -qq /restore/gitea-dump.zip -d /restore'
${EXEC} /bin/bash -c 'unzip -qq /restore/gitea-avatars.zip -d /restore'
echo " - Unpacking repositories inside container"
${EXEC} /bin/bash -c 'unzip -qq /restore/gitea-repo.zip -d /restore'
echo " - Restoring 1/5: repositories"
${EXEC} /bin/bash -c 'rm -rf /data/git/repositories && cp -r /restore/repositories /data/git/repositories'
# We are actually just gonna skip this whole step,
# since everything here should be in d-gitea repo
echo " - Restoring 2/5: (skipping custom files)"
#echo " - Restoring 2/5: custom files"
#echo " - Moving old app.ini"
#${EXEC} /bin/bash -c 'mv /data/gitea/conf/app.ini /data/gitea/conf/app.ini.old'
#echo " - Restoring custom files"
#${EXEC} /bin/bash -c 'rm -rf /data/gitea && cp -r /restore/custom /data/gitea'
echo " - Restoring 3/5: sqlite database"
# Convert MySQL-style true/false literals for sqlite.
# NOTE(review): the sed runs globally over the whole dump, so any
# user data containing the strings "true"/"false" is rewritten too.
${EXEC} /bin/bash -c 'cat /restore/gitea-db.sql | sed "s/false/0/g" | sed "s/true/1/g" | sqlite3 /data/gitea/gitea.db'
echo " - Restoring 4/5: avatars"
${EXEC} /bin/bash -c 'rm -rf /data/gitea/avatars && cp -r /restore/avatars /data/gitea/avatars'
echo " - Restoring 5/5: repairing paths in .git/hooks"
###################
# NOTE: This is entirely case-dependent.
# If backup/restore happened from same gitea dir structure,
# this section should be removed entirely.
#
# Example below swaps out /www/gitea with /data/gitea
#
#${EXEC} /bin/bash -c 'find /data/git/repositories -type f -exec sed -i -e "s%/www/gitea/bin/custom/conf%/data/gitea/conf%g" {} \;'
#${EXEC} /bin/bash -c 'find /data/git/repositories -type f -exec sed -i -e "s%/www/gitea/bin/gitea%/app/data/gitea%g" {} \;'
#
###################
echo " - Cleaning up"
${EXEC} /bin/bash -c 'rm -rf /restore'
else
usage
fi

View File

@@ -1,96 +0,0 @@
#!/bin/bash
#
# This stupid script needs too be scaled back,
# because sudo and ssh can't play nicely together.
#
# This entire idiotic adventure in docker land
# has been chock full of the most inane, stupid
# landmines that containers cannot avoid,
# like this one - if you try and run ssh through sudo,
# you can't deal with keys or passphrases.
#
# Seriously. Gimme a fucking break.
#
#
# This script scrapes repository logs
# from the docker volume holding gitea.
#
# It assembles a commit count for use
# in visualizing git commits.
#
# It commits the new commit count data
# to https://git.charlesreid1.com/data/charlesreid1-data

# Must run as root: the repos live under /var/lib/docker/volumes.
if [ "$(id -u)" != "0" ]; then
echo ""
echo ""
echo "This script should be run as root."
echo ""
echo ""
exit 1;
fi

# Scratch dirs; GITDIR is where the output csv is left for the
# non-sudo push script to pick up.
WORKDIR="/tmp/gitea-temp"
GITDIR="/tmp/gitea-temp/charlesreid1-data"
rm -rf ${WORKDIR}
mkdir -p ${WORKDIR}
sudo chown -R charles:charles ${WORKDIR}
rm -rf ${GITDIR}
mkdir -p ${GITDIR}

# Because sudo and ssh are too stupid to play nicely,
# we're forced to use this sudo script to dump out
# information every hour,
# and leave it up to some user script somewhere
# to grab the latest whenever they need it.
#
# This is the most idiotic problem yet.
# don't clone data repo, that's the whole stupid problem
### sudo -H -u charles git clone ssh://git@gitdatabot:222/data/charlesreid1-data.git ${GITDIR}

# Step 2: extract commit dates
# Walk every bare repo in the gitea docker volume and append the date
# of each commit (the --author="harles" substring match presumably
# covers both "charles" and "Charles" -- confirm).
for dir in `find /var/lib/docker/volumes/podcharlesreid1_stormy_gitea_data/_data/git/repositories -mindepth 2 -maxdepth 2 -type d`; do
git --git-dir=$dir --work-tree=${WORKDIR} \
log \
--all --author="harles" --oneline --pretty="%H %ai" \
| cut -f 2 -d " " >> ${GITDIR}/commit_dates
done

# Step 3: bin commit dates
# Count how many commits happened on each date and emit a csv.
# NOTE(review): `echo $words | sort` emits a single line, so this sort
# is a no-op; the Counter in the python one-liner does the real work.
words=$( cat ${GITDIR}/commit_dates )
echo "date,commits" > ${GITDIR}/commit_counts.csv
echo $words | sort | python -c 'import sys;
from collections import Counter; c=Counter(sys.stdin.read().strip().split(" "));
print("\n".join(("%s,%s"%(k, c[k]) for k in c.keys())));' | sort | awk 'NF' >> ${GITDIR}/commit_counts.csv
rm -f ${GITDIR}/commit_dates

# Hand the csv to the regular user who will commit/push it.
chown charles:charles ${GITDIR}/commit_counts.csv

# leave commits to the user.
###
### (
### cd ${GITDIR}
### sudo -H -u charles git config user.name "databot"
### sudo -H -u charles git config user.email "databot@charlesreid1.com"
### sudo -H -u charles git add commit_counts.csv
### sudo -H -u charles git commit commit_counts.csv -m '[scrape_gitea_as_sudo.sh] updating gitea commit count data'
### sudo -H -u charles git push origin master
### )

View File

@@ -1,58 +0,0 @@
#!/bin/bash
#
# Create a tar file containing wiki files
# from the stormy_mw container.

set -x

# Print usage and exit non-zero.
function usage {
echo ""
echo "backup_wikifiles.sh script:"
echo "Create a tar file containing wiki files"
echo "from the stormy_mw container"
echo ""
echo " ./backup_wikifiles.sh <tar-file>"
echo ""
echo "Example:"
echo ""
echo " ./backup_wikifiles.sh /path/to/wikifiles.tar.gz"
echo ""
echo ""
exit 1;
}

if [[ "$#" -gt 0 ]];
then
echo ""
echo "Backup MediaWiki Files:"
echo "------------------------"
echo ""
NAME="podcharlesreid1_stormy_mw_1"
TAR="wikifiles.tar.gz"
# If this script is being run from a cron job,
# don't use -i flag with docker
CRON="$( pstree -s $$ | /bin/grep -c cron )"
DOCKER=""
if [[ "$CRON" -eq 1 ]];
then
DOCKER="docker exec -t"
else
DOCKER="docker exec -it"
fi
# zip to temp dir inside container
${DOCKER} ${NAME} tar czf /tmp/${TAR} /var/www/html/images
# copy from container to target $1
# NOTE(review): $TARGET is never defined in this script, so this
# mkdir gets the dirname of an empty string; presumably it should
# use "$1".
mkdir -p $(dirname $TARGET)
# NOTE(review): this runs `cp ${NAME}:/tmp/... $1` *inside* the
# container via docker exec; the container-path syntax and the host
# path $1 only make sense for a host-side `docker cp` command.
${DOCKER} ${NAME} cp ${NAME}:/tmp/${TAR} $1
# clean up container
${DOCKER} ${NAME} rm /tmp/${TAR}
else
usage
fi

View File

@@ -2,7 +2,6 @@
# #
# Restore wiki files from a tar file # Restore wiki files from a tar file
# into the stormy_mw container. # into the stormy_mw container.
set -x
function usage { function usage {
echo "" echo ""
@@ -28,11 +27,14 @@ function usage {
if [[ "$#" -eq 1 ]]; if [[ "$#" -eq 1 ]];
then then
NAME="podcharlesreid1_stormy_mw_1" NAME="pod-charlesreid1_stormy_mw_1"
TAR=$(basename "$1") TAR=$(basename "$1")
set -x
docker cp $1 ${NAME}:/tmp/${TAR} docker cp $1 ${NAME}:/tmp/${TAR}
docker exec -it ${NAME} mv /var/www/html/images /var/www/html/images.old docker exec -it ${NAME} mv /var/www/html/images /var/www/html/images.old
docker exec -it ${NAME} tar -xf /tmp/${TAR} -C /var/www/html/ && rm -f /tmp/${TAR} docker exec -it ${NAME} tar -xf /tmp/${TAR} -C / && rm -f /tmp/${TAR}
docker exec -it ${NAME} chown -R www-data:www-data /var/www/html/images
set +x
else else
usage usage

View File

@@ -23,7 +23,7 @@ function usage {
if [[ "$#" -eq 0 ]]; if [[ "$#" -eq 0 ]];
then then
NAME="podcharlesreid1_stormy_mw_1" NAME="pod-charlesreid1_stormy_mw_1"
docker exec -it ${NAME} php /var/www/html/maintenance/update.php docker exec -it ${NAME} php /var/www/html/maintenance/update.php
else else

View File

@@ -20,14 +20,14 @@ function usage {
exit 1; exit 1;
} }
NAME="podcharlesreid1_stormy_mysql_1" CONTAINER_NAME="pod-charlesreid1_stormy_mysql_1"
if [[ "$#" -gt 0 ]]; if [[ "$#" -gt 0 ]];
then then
TARGET="$1" TARGET="$1"
mkdir -p $(dirname $TARGET) mkdir -p $(dirname $TARGET)
docker exec -i ${NAME} sh -c 'exec mysqldump wikidb --databases -uroot -p"$MYSQL_ROOT_PASSWORD"' > $TARGET docker exec -i ${CONTAINER_NAME} sh -c 'exec mysqldump wikidb --databases -uroot -p"$MYSQL_ROOT_PASSWORD"' > $TARGET
else else
usage usage

View File

@@ -6,14 +6,15 @@
# Note that this expects the .sql dump # Note that this expects the .sql dump
# to create its own databases. # to create its own databases.
# Use the --databases flag with mysqldump. # Use the --databases flag with mysqldump.
set -x
function usage { function usage {
echo "" echo ""
echo "restore_database.sh script:" echo "restore_database.sh script:"
echo ""
echo "Restores a database from an SQL dump." echo "Restores a database from an SQL dump."
echo "Restores the database into the " echo "Restores the database into the "
echo "stormy_msyql container." echo "stormy_msyql container."
echo ""
echo "Obtains MySQL password from" echo "Obtains MySQL password from"
echo "MYSQL_ROOT_PASSWORD env var" echo "MYSQL_ROOT_PASSWORD env var"
echo "inside mysql container." echo "inside mysql container."
@@ -40,13 +41,32 @@ function usage {
# a complete and utterpain in the ass # a complete and utterpain in the ass
# because of all these one-off # because of all these one-off
# "whoopsie we don't do that" problems. # "whoopsie we don't do that" problems.
#
CONTAINER_NAME="pod-charlesreid1_stormy_mysql_1"
TARGET=$(basename $1)
TARGET_DIR=$(dirname $1)
if [[ "$#" -eq 1 ]]; if [[ "$#" -eq 1 ]];
then then
docker exec -i podcharlesreid1_stormy_mysql_1 \
mysql -uroot -p$MYSQL_ROOT_PASSWORD \ # Step 1: Copy the sql dump into the container
< $1 set -x
docker cp $1 ${CONTAINER_NAME}:/tmp/${TARGET}
set +x
# Step 2: Run sqldump inside the container
set -x
docker exec -i ${CONTAINER_NAME} sh -c "/usr/bin/mysql --defaults-file=/root/.mysql.rootpw.cnf < /tmp/${TARGET}"
set +x
# Step 3: Clean up sql dump from inside container
set -x
docker exec -i ${CONTAINER_NAME} sh -c "/bin/rm -fr /tmp/${TARGET}.sql"
set +x
set +x
else else
usage usage
fi fi