import sys
import os
import re
import logging
import subprocess
import json
from argparse import ArgumentParser, SUPPRESS
import time
import shutil
import signal
# Scratch directory for lock/output/log files (may be overridden by --temp-folder).
WORK_DIR = '/tmp'
# Mountpoints matching this regex are skipped when placing '.mp' marker files.
MOUNTED_POINTS_EXCLUDED_PATTERN = '^((/sys)|(/proc)|(/dev)|(/boot)|(/run)|(/var/lib/Acronis/mount)|(/home/cagefs-skeleton)|(/home/virtfs))'
# Pseudo/virtual filesystem types excluded from the findmnt listing in pre().
MOUNTED_POINTS_EXCLUDED_FS = 'sysfs,cgroup,proc,devtmpfs,devpts,pstore,debugfs,hugetlbfs,mqueue,configfs,tmpfs,securityfs,selinuxfs,efivarfs,rpc_pipefs,autofs,binfmt_misc'
# Directory whose per-partition subdirectories are unmounted and removed by purge().
MOUNT_DIR = "/var/lib/Acronis/mount"
# Stores the PID of the background mysql freeze session while tables are locked.
MYSQL_FREEZE_LOCKFILE = os.path.join(WORK_DIR, 'freeze_mysql.lock')
# Captures stdout of the freeze session (first line: CONNECTION_ID of the session).
MYSQL_OUTPUT_FILE = os.path.join(WORK_DIR, 'freeze_mysql.out')
MYSQL_FREEZE = True  # when False, pre() skips the MySQL freeze step entirely
MYSQL_FREEZE_TIMEOUT = 120 # in seconds.
MYSQL_FREEZE_SNAPSHOT_TIMEOUT = 10 # in seconds.
MYSQL_FREEZE_ONLY_MYISAM = False  # when True, only MyISAM tables are locked
LOGFILE = os.path.join(WORK_DIR, 'prepostdatacapture.log')
# Final location of the generated metadata file.
TARGET_METADATA_PATH = '/home/metadata.json'
logger = logging.getLogger()  # root logger; handlers are attached in __main__
class CommandError(Exception):
    """Raised when a cPanel CLI command (whmapi1/cpapi2/uapi) reports a failure."""

    def __init__(self, command, error):
        message = "CPanel command ({0}) finished with error: {1}".format(command, error)
        super().__init__(message)
        # Keep the raw error text available to callers.
        self.error = error
class FreezeError(Exception):
    """Raised when the MySQL table freeze cannot be started, completed or cleaned up."""

    def __init__(self, error):
        # Expose the underlying error text to callers before building the message.
        self.error = error
        super().__init__("mysql freeze error: {0}".format(error))
class MountingError(Exception):
    """Raised by purge() when a stale mountpoint cannot be unmounted."""
def run_shell(cmd, raise_e=True, log=True):
    """Run *cmd* through the shell and return its stdout as bytes.

    BUG FIX: the original wrapped Popen in ``except CalledProcessError``,
    but ``Popen.communicate()`` never raises that exception, so failures
    were silently ignored and both ``raise_e`` and ``log`` were dead
    options.  The exit status is now checked explicitly, which makes the
    documented contract (and callers such as ``mysqladmin ping`` in
    freeze_mysql) actually hold.

    :param cmd: shell command line to execute.
    :param raise_e: when True, raise subprocess.CalledProcessError on a
        non-zero exit status; when False, return None instead.
    :param log: when True, log failures via the module logger.
    :return: stdout as bytes on success; None on non-raising failure.
    """
    process = subprocess.Popen(
        cmd,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    out, err = process.communicate()
    if process.returncode == 0:
        return out
    if log:
        logger.error(
            "Command \"{0}\" has failed: {1}".format(
                cmd,
                '' if err is None else err.decode('utf-8')
            )
        )
    if raise_e:
        raise subprocess.CalledProcessError(
            process.returncode, cmd, output=out, stderr=err
        )
    return None
def quote(str_to_quote):
    """Shell-escape *str_to_quote* with single quotes (a shlex.quote clone).

    Empty or None input yields the literal two-character string ``''``.
    Strings containing only shell-safe characters are returned untouched.
    """
    unsafe = re.compile(r'[^\w@%+=:,./-]')
    if not str_to_quote:
        return "''"
    if unsafe.search(str_to_quote) is None:
        # Nothing the shell could misinterpret -- no quoting needed.
        return str_to_quote
    # Wrap in single quotes; each embedded single quote becomes '"'"'.
    return "'" + str_to_quote.replace("'", "'\"'\"'") + "'"
class CPanelClient:
    """Thin wrapper around the cPanel/WHM command-line API tools.

    Every query shells out to ``whmapi1``, ``cpapi2`` or ``uapi`` with
    ``--output=json`` and decodes the JSON these tools print.
    """

    WHM_FEATURE_FOLDER = '/usr/local/cpanel/whostmgr/addonfeatures'
    CPANEL_USERS_FOLDER = '/var/cpanel/users'
    DISABLED_FEATURES = '/var/cpanel/features/disabled'
    DISABLED_FEATURE_LIST = 'disabled'
    RESELLER_FEATURE = 'acronisbackup_reseller'
    RETENTION_FEATURES_PREFIX = "acronisbackup_"
    RETENTION_FEATURES_UNLIMITED = "acronisbackup_0"

    @classmethod
    def _run_shell(cls, cmd):
        """Run a cPanel tool with JSON output and return the parsed response."""
        cmd = '{0} --output=json'.format(cmd)
        raw_output = run_shell(cmd)
        try:
            return json.loads(raw_output.decode('utf-8', 'ignore'))
        except json.JSONDecodeError:
            logger.error("Cannot parse response from command \"{0}\"".format(cmd))
            raise

    def _call_whmapi1(self, cmd):
        """Execute a WHM API 1 call and return its 'data' payload."""
        full_cmd = 'whmapi1 {0}'.format(cmd)
        reply = self._run_shell(full_cmd)
        # metadata.result == 0 signals failure for WHM API 1.
        if reply['metadata']['result'] == 0:
            raise CommandError(full_cmd, reply['metadata']['reason'])
        return reply.get('data', {})

    def _call_cpapi2(self, cmd):
        """Execute a legacy cPanel API 2 call and return its 'data' list."""
        full_cmd = 'cpapi2 {0}'.format(cmd)
        reply = self._run_shell(full_cmd)['cpanelresult']
        if reply.get('error'):
            raise CommandError(full_cmd, reply['error'])
        return reply['data']

    def _call_uapi(self, cmd):
        """Execute a UAPI call and return its 'data' payload."""
        full_cmd = 'uapi {0}'.format(cmd)
        try:
            reply = self._run_shell(full_cmd)
        except subprocess.CalledProcessError as e:
            raise CommandError(full_cmd, e.stderr)
        outcome = reply['result']
        if outcome.get('errors'):
            raise CommandError(full_cmd, '; '.join(outcome['errors']))
        return outcome['data']

    def fetch_version(self):
        """Return the cPanel & WHM version string."""
        return self._call_whmapi1('version')['version']

    def fetch_accounts(self):
        """Return the list of hosted account records."""
        return self._call_whmapi1('listaccts').get('acct', [])

    def fetch_databases(self):
        """Return all database records known to cPanel."""
        return self._call_whmapi1('list_databases').get('payload', [])

    def fetch_account_domains_data(self, account_name):
        """Return (main_domain, sub_domains, addon_domains) detail dicts."""
        data = self._call_uapi(
            '--user={0} DomainInfo domains_data'.format(quote(account_name))
        )
        return (
            data.get('main_domain'),
            data.get('sub_domains', []),
            data.get('addon_domains', []),
        )

    def fetch_addon_domains_data(self, account_name):
        """Return addon-domain records via the legacy API 2 call."""
        return self._call_cpapi2(
            '--user={0} AddonDomain listaddondomains'.format(quote(account_name))
        )

    def fetch_account_domains(self, account_name):
        """Return (main_domain, sub_domains, addon_domains) name lists."""
        data = self._call_uapi(
            '--user={0} DomainInfo list_domains'.format(quote(account_name))
        )
        return (
            data.get('main_domain'),
            data.get('sub_domains', []),
            data.get('addon_domains', []),
        )

    def fetch_account_emails(self, account_name):
        """Return the account's e-mail accounts (Email::list_pops)."""
        return self._call_uapi('--user={0} Email list_pops'.format(quote(account_name)))

    def fetch_email_quota(self, account_name, user, domain):
        """Return the mailbox quota, in bytes, for user@domain."""
        return self._call_uapi('--user={0} Email get_pop_quota email={1} domain={2} as_bytes=1'.format(
            quote(account_name),
            quote(user),
            quote(domain),
        ))

    def fetch_mysql_servers(self):
        """Return the configured MySQL server profiles keyed by name."""
        return self._call_whmapi1('remote_mysql_read_profiles')

    def fetch_local_mysql_version(self):
        """Return the version of the locally installed MySQL server."""
        return self._call_whmapi1('current_mysql_version')['version']

    def fetch_user_main_domain(self, account_name):
        """Return the account's main domain via the legacy API 2 call."""
        return self._call_cpapi2('--user={0} DomainLookup getmaindomain'.format(
            quote(account_name))
        )

    def fetch_subdomains_data(self, account_name):
        """Return subdomain records via the legacy API 2 call."""
        return self._call_cpapi2('--user={0} SubDomain listsubdomains'.format(
            quote(account_name)
        ))
class CPanelMetaCollector:
    """Gathers cPanel metadata (version, accounts, domains, e-mails,
    databases) into ``self.meta`` so collect_meta() can serialize it."""

    # Only databases of this engine are collected.
    DBMS_TYPE_MYSQL = 'mysql'
    RPM_PACKAGE = 'acronis-backup-cpanel'

    def __init__(self, cpanel_client):
        # Client used for all whmapi1/cpapi2/uapi queries.
        self.cpanel_client = cpanel_client
        # Result document; built by collect().
        self.meta = None

    def _get_ent(self, type_, key, index):
        """Look up one field of a getent database entry.

        Runs ``getent <type_> <key>`` and returns the colon-separated
        field at position ``index`` (1-based, as used by ``cut``), or
        None when the lookup fails or the field is empty.
        """
        getent_cmd = 'getent {0} {1} | cut -d: -f{2}'.format(type_, quote(key), index)
        process = subprocess.Popen(getent_cmd, shell=True, stdout=subprocess.PIPE)
        out, _ = process.communicate()
        result = out.decode().strip()
        return result if process.returncode == 0 and result else None

    def _get_dbs(self):
        """Map each cPanel user to the list of their MySQL databases."""
        dbs = dict()
        for db in self.cpanel_client.fetch_databases():
            # Non-MySQL engines are skipped entirely.
            if db['engine'] != self.DBMS_TYPE_MYSQL:
                continue
            if db['cpuser'] not in dbs:
                dbs[db['cpuser']] = list()
            dbs[db['cpuser']].append({
                'name': db['name'],
                'type': db['engine'],
            })
        return dbs

    def _get_account_emails_and_filters(
        self,
        account_name,
        home_dir
    ):
        """Return (emails, filters) for one account.

        ``emails`` is a list of {login, email, quota} dicts; ``filters``
        is a list of {email, path} dicts pointing at readable, valid-JSON
        ``filter.cache`` files under ``home_dir``.  When ``home_dir`` is
        None, filter collection is skipped.
        """
        emails = []
        for res_item in self.cpanel_client.fetch_account_emails(account_name):
            if '@' in res_item['email']:
                login, domain = res_item['email'].split('@', 1)
            else:
                # Address without a domain part: fall back to the
                # account's main domain.
                login = res_item['email']
                domain = self.cpanel_client.fetch_user_main_domain(account_name)[0]['main_domain']
            quota = self.cpanel_client.fetch_email_quota(account_name, login, domain)
            quota = quota.strip() if isinstance(quota, str) else quota
            # Quota maybe '' or 'unlimited' in different cPanel versions
            quota = 0 if not quota or quota == 'unlimited' else int(quota)
            emails.append({
                'login': res_item['login'],
                'email': res_item['email'],
                'quota': quota,
            })
        if home_dir is None:
            return emails, []
        filters = []
        for email in emails:
            # Per-mailbox filters live in <home>/etc/<domain>/<login>/filter.cache,
            # account-wide filters in <home>/etc/filter.cache.
            if '@' in email['email']:
                login, domain = email['email'].split('@', 1)
                filter_cache_path = os.path.join(home_dir, 'etc', domain, login, 'filter.cache')
            else:
                filter_cache_path = os.path.join(home_dir, 'etc', 'filter.cache')
            if not os.path.exists(filter_cache_path):
                continue
            # Validate the filter cache is readable, well-formed JSON
            # before referencing it in the metadata.
            try:
                with open(filter_cache_path, 'r') as f:
                    json.load(f)
            except json.JSONDecodeError:
                logger.warning(
                    "Mail filter \"{0}\" contains invalid json.".format(filter_cache_path)
                )
                continue
            except OSError:
                logger.warning(
                    "Cannot read mail filter \"{0}\".".format(filter_cache_path)
                )
                continue
            except Exception as e:
                logger.warning(
                    "Cannot parse mail filter \"{0}\". Error: {1}.".format(
                        filter_cache_path,
                        str(e)
                    )
                )
                continue
            filters.append({
                'email': email['email'],
                'path': filter_cache_path,
            })
        return emails, filters

    def collect_accounts(self, collect_dbs=True):
        """Populate self.meta['accounts'] keyed by account uid.

        Per-account failures are logged and skipped so one broken account
        cannot abort the whole collection.
        """
        self.meta['accounts'] = {}
        accounts = self.cpanel_client.fetch_accounts()
        logger.info("Accounts number: {0}".format(len(accounts)))
        if collect_dbs:
            dbs = self._get_dbs()
            dbs_number = sum(map(len, dbs.values()))
            logger.info("Databases number: {0}".format(dbs_number))
        else:
            logger.warning("Skipping databases collection.")
            dbs = {}

        def _make_metadata_for_account(account):
            # Build the metadata dict for a single account record.

            def _update_domain_data(domains, domains_document_root):
                # Record document root (and related fields) for each domain dict.
                for domain in domains:
                    if not isinstance(domain, dict):
                        continue
                    try:
                        domains_document_root[domain['domain']] = {
                            'documentroot': domain['documentroot'],
                            'type': domain.get('type'),
                            'owner': domain.get('owner'),
                            'user': domain.get('user'),
                        }
                    except KeyError:
                        logger.warning("Unable to save document root for domain {0}.".format(domain))

            def get_gid_by_group_name(group_name):
                # Resolve a unix group name to its numeric gid (getent field 3).
                gid = self._get_ent('group', group_name, 3)
                return int(gid) if gid is not None else None

            account_name = account['user']
            owner = account['owner']
            domains_data = {}
            processed_subdomains, processed_addon_domains = [], []
            main_domain, sub_domains, addon_domains =\
                self.cpanel_client.fetch_account_domains(account_name)
            main_domain_data, sub_domains_data, addon_domains_data =\
                self.cpanel_client.fetch_account_domains_data(account_name)
            if main_domain:
                home_dir = main_domain_data.get('homedir')
                _update_domain_data([main_domain_data], domains_data)
            else:
                home_dir = None
            if sub_domains:
                _update_domain_data(sub_domains_data, domains_data)
                # Can only be fetched from old API.
                try:
                    sub_domain_data = self.cpanel_client.fetch_subdomains_data(account_name)
                    for sub_domain in sub_domain_data:
                        if not isinstance(sub_domain, dict):
                            continue
                        if domains_data.get(sub_domain['domain']):
                            domains_data[sub_domain['domain']]['rootdomain'] = \
                                sub_domain.get('rootdomain')
                        processed_subdomains.append(sub_domain['domain'])
                except Exception:
                    logger.exception("Failed to collect subdomains for account: {0}".format(
                        account_name))
                    processed_subdomains = []
            if addon_domains:
                _update_domain_data(addon_domains_data, domains_data)
                # Can only be fetched from old API.
                try:
                    addon_domain_data = self.cpanel_client.fetch_addon_domains_data(account_name)
                    for addon_domain in addon_domain_data:
                        if not isinstance(addon_domain, dict):
                            continue
                        if domains_data.get(addon_domain['domain']):
                            domains_data[addon_domain['domain']]['subdomain'] =\
                                addon_domain.get('subdomain')
                        processed_addon_domains.append(addon_domain['domain'])
                except Exception:
                    logger.exception("Failed to collect addon domains for {0}".format(account_name))
                    processed_addon_domains = []
            try:
                emails, filters = self._get_account_emails_and_filters(account_name, home_dir)
            except Exception:
                logger.exception("Failed to collect emails and filters for {0}".format(account_name))
                emails, filters = [], []
            gid = get_gid_by_group_name(account_name)
            return {
                'id': account['uid'],
                'gid': gid,
                'name': account_name,
                'owner': owner,
                'domain': main_domain,
                'subDomains': processed_subdomains,
                'addonDomains': processed_addon_domains,
                'homeDir': home_dir,
                'dbs': dbs.get(account_name, []),
                'emails': emails,
                'filters': filters,
                'domainsData': domains_data,
            }

        for account in accounts:
            try:
                self.meta['accounts'][account['uid']] = _make_metadata_for_account(account)
            except Exception:
                # DO NOT RAISE
                logger.exception(
                    "Skip account \"{0}\", cannot make metadata.".format(account['user'])
                )

    def collect_db_servers(self):
        """Populate self.meta['dbServers'] with the local MySQL profile.

        Returns True when an active local MySQL server was found, False
        otherwise (in which case 'dbServers' is left empty).
        """
        self.meta['dbServers'] = {}
        mysql_servers = self.cpanel_client.fetch_mysql_servers()
        local_mysql_server = mysql_servers.get('localhost')
        if not local_mysql_server or not local_mysql_server['active']:
            logger.warning("There is no active local mysql server.")
            return False
        self.meta['dbServers']['localhost'] = {
            'host': local_mysql_server['mysql_host'],
            'port': local_mysql_server['mysql_port'],
            'type': self.DBMS_TYPE_MYSQL,
            'dbmsOptions': {
                # NOTE(review): hard-coded defaults; assumes a standard local
                # MySQL installation -- confirm for non-default datadir setups.
                'myCnfPath': '/root/.my.cnf',
                'datadir': '/var/lib/mysql/',
                'version': self.cpanel_client.fetch_local_mysql_version(),
            },
        }
        logger.info(
            "Local mysql server: {0}:{1}".format(
                local_mysql_server['mysql_host'],
                local_mysql_server['mysql_port'],
            )
        )
        return True

    def collect_unmapped_dbs(self):
        """Populate self.meta['unmappedDBs'] with MySQL databases owned by
        no collected account (system schemas and blanks excluded)."""

        def get_home_dir_by_uid(uid):
            # passwd field 6 is the home directory.
            return self._get_ent('passwd', str(uid), 6)

        self.meta['unmappedDBs'] = []
        # Header line of `SHOW DATABASES` output, system schemas and blanks.
        skip_lines = set(['Database', 'mysql', 'information_schema', 'performance_schema', 'sys', ''])
        for account in self.meta['accounts'].values():
            for db in account['dbs']:
                skip_lines.add(db['name'])
        server = self.meta['dbServers'].get('localhost')
        if not server:
            logger.warning("Skipped collecting unmapped DBs, there's no localhost MySQL instance.")
            return
        # Re-point HOME at the current user's home directory -- presumably
        # so the mysql client picks up credentials from ~/.my.cnf; verify.
        change_home_cmd = 'export HOME={0}'.format(
            get_home_dir_by_uid(os.getuid())
        )
        cmd = "mysql --host={0} --port={1} --execute='SHOW DATABASES;'".format(
            server['host'],
            server['port'],
        )
        result = run_shell('{0} && {1}'.format(change_home_cmd, cmd))
        output_list = result.decode().split('\n')
        for db in output_list:
            if db not in skip_lines:
                self.meta['unmappedDBs'].append({'name': db, 'type': server['type']})

    def collect_cpanel_info(self):
        """Record the cPanel version in self.meta['cpanel']."""
        cpanel_version = self.cpanel_client.fetch_version()
        self.meta['cpanel'] = {
            'version': cpanel_version,
        }
        logger.info("CPanel version: {0}".format(cpanel_version))

    def collect(self):
        """Build the full metadata document in self.meta."""
        self.meta = {
            'timestamp': int(time.time()),
        }
        self.collect_cpanel_info()
        if self.collect_db_servers():
            self.collect_accounts()
            self.collect_unmapped_dbs()
        else:
            # Without a usable MySQL server, skip database collection.
            self.collect_accounts(False)
def collect_meta(cpanel_client):
    """Collect cPanel metadata and publish it to TARGET_METADATA_PATH.

    The file is first created empty with 0600 permissions, then filled
    and moved into place; failures are logged and re-raised.
    """
    logger.info("Generating metadata...")
    try:
        collector = CPanelMetaCollector(cpanel_client)
        collector.collect()
        # For security reasons - first create an empty file, set permissions and then fill it in
        tmp_path = os.path.join(WORK_DIR, 'metadata.json')
        with open(tmp_path, 'w') as handle:
            json.dump({}, handle)
        os.chmod(tmp_path, 0o600)
        with open(tmp_path, 'w') as handle:
            json.dump(collector.meta, handle)
        shutil.move(tmp_path, TARGET_METADATA_PATH)
    except Exception:
        logger.exception("Generating metadata has failed")
        raise
    logger.info('Collecting metadata succeeded.')
def freeze_mysql():
    """Flush and read-lock MySQL tables for a consistent snapshot.

    A detached ``mysql`` session takes the lock, touches
    MYSQL_FREEZE_LOCKFILE once the lock is held, then sleeps for
    MYSQL_FREEZE_SNAPSHOT_TIMEOUT seconds so the snapshot can be taken.
    The wrapper PID is stored in the lockfile for post() to terminate.

    Raises:
        FreezeError: when a previous freeze is still running, the freeze
            session dies prematurely, or the lock is not acquired within
            MYSQL_FREEZE_TIMEOUT seconds.
    """
    run_shell("mysqladmin ping > /dev/null 2>&1")  # raise exception if ping fails
    if os.path.isfile(MYSQL_FREEZE_LOCKFILE):
        if os.stat(MYSQL_FREEZE_LOCKFILE).st_size > 0:
            # The lockfile holds the PID of a previous freeze session;
            # refuse to start while that process is still alive.
            output = run_shell(
                "ps -p \"$(/bin/cat \"{0}\")\" -o pid=".format(MYSQL_FREEZE_LOCKFILE),
                raise_e=False,
                log=False,
            )
            if output and int(output) > 0:
                raise FreezeError('Can not start freezing because process already exists')
        logger.info('Deleting freeze lock file...')
        try:
            os.remove(MYSQL_FREEZE_LOCKFILE)
        except Exception:
            raise FreezeError("Unable to remove lockfile: {0}".format(MYSQL_FREEZE_LOCKFILE))
    # Prepare freeze query
    if MYSQL_FREEZE_ONLY_MYISAM:
        sql_myisam_tables = (
            "SET SESSION group_concat_max_len = 4294967295; "
            "SELECT IFNULL(GROUP_CONCAT(t.db_table SEPARATOR ','), '') as res "
            "FROM ("
            "SELECT CONCAT('\\`', TABLE_SCHEMA, '\\`.\\`', TABLE_NAME, '\\`') as db_table "
            "FROM information_schema.TABLES "
            "WHERE ENGINE='MyISAM' AND TABLE_SCHEMA NOT IN('mysql','information_schema','performance_schema')"
            ") t;"
        )
        # String concatenation keeps the awk braces out of .format().
        cmd = (
            "SQL_MYISAM=\"{0}\"".format(sql_myisam_tables)
            + " ; mysql --defaults-file=\"/root/.my.cnf\" --execute=\"$SQL_MYISAM\""
            + " | tail -n +2 | awk '{$1=$1;print}'"
        )
        myisam_tables = run_shell(cmd)
        # BUG FIX: the old `is None` check could never fire, and an empty
        # result (no MyISAM tables) fell through and produced a broken
        # "FLUSH TABLES ;" statement.  Treat empty output as "nothing to lock".
        myisam_tables = myisam_tables.decode().strip() if myisam_tables else ''
        if not myisam_tables:
            logger.info('There are no MyISAM tables to lock.')
            return
        # N comma-joined tables contain N-1 commas.
        logger.info("Trying to lock {0} MyISAM tables.".format(myisam_tables.count(',') + 1))
        freeze_sql = (
            "SELECT CONNECTION_ID();"
            "FLUSH TABLES {0};"
            "FLUSH TABLES {0};"
            "FLUSH TABLES {0} WITH READ LOCK;"
            "SYSTEM touch \"{1}\";"
            "SYSTEM sleep {2};"
        ).format(
            myisam_tables.replace('`', '\\`'),
            MYSQL_FREEZE_LOCKFILE,
            MYSQL_FREEZE_SNAPSHOT_TIMEOUT,
        )
    else:
        logger.info('Trying to lock all tables.')
        freeze_sql = (
            "SELECT CONNECTION_ID();"
            "FLUSH TABLES;"
            "FLUSH TABLES;"
            "FLUSH TABLES WITH READ LOCK;"
            "SYSTEM touch \"{0}\";"
            "SYSTEM sleep {1};"
        ).format(MYSQL_FREEZE_LOCKFILE, MYSQL_FREEZE_SNAPSHOT_TIMEOUT)
    freeze_sql = freeze_sql.replace('"', '\\"')
    # Run freeze command; the connection id goes to MYSQL_OUTPUT_FILE.
    cmd = "mysql --defaults-file=\"/root/.my.cnf\" --unbuffered -N -s --execute=\"{0}\" 1> \"{1}\" 2>> \"{2}\"".format(
        freeze_sql, MYSQL_OUTPUT_FILE, LOGFILE)
    logger.debug(cmd)
    proc = subprocess.Popen(cmd, shell=True)
    freeze_check_interval_sec = 1
    seconds_elapsed = 0
    freeze_session_pid = proc.pid
    # Wait until the freeze session signals the lock by touching the lockfile.
    while not os.path.isfile(MYSQL_FREEZE_LOCKFILE):
        proc.poll()
        if proc.returncode is not None:
            # BUG FIX: previously the session exiting before the lockfile
            # appeared busy-looped forever; treat it as a failed freeze.
            if os.path.isfile(MYSQL_FREEZE_LOCKFILE):
                break
            raise FreezeError('Seems like MySQL freeze statement failed. Aborted.')
        logger.info('Waiting for mysql to freeze tables...')
        time.sleep(freeze_check_interval_sec)
        seconds_elapsed += freeze_check_interval_sec
        # raise_e=False: ps exits non-zero when the PID no longer exists,
        # so empty/None output means the session is gone.
        output = run_shell(
            "ps -p {0} -o pid=".format(freeze_session_pid), raise_e=False, log=False
        )
        if not output:
            raise FreezeError('Seems like MySQL freeze statement failed. Aborted.')
        if seconds_elapsed > MYSQL_FREEZE_TIMEOUT:
            logger.error('MySQL cannot freeze in suitable time. Aborting...')
            try:
                os.kill(freeze_session_pid, signal.SIGKILL)
            except Exception:
                raise FreezeError("Unable to kill {0}".format(freeze_session_pid))
            # First line of the output file is CONNECTION_ID() of the freeze
            # session; kill that server-side thread as well.  BUG FIX: strip
            # the trailing newline before interpolating into the kill command.
            with open(MYSQL_OUTPUT_FILE, 'r') as f:
                freeze_thread_id = f.readline().strip()
            if freeze_thread_id and int(freeze_thread_id) > 0:
                logger.error("Killing MySQL thread. ID is {0}".format(freeze_thread_id))
                run_shell("mysqladmin --defaults-file=\"/root/.my.cnf\" kill \"{0}\"".format(freeze_thread_id))
                raise FreezeError("{0} thread was killed.".format(freeze_thread_id))
            raise FreezeError('Temp file does not contain thread ID.')
    # Remember the wrapper PID so post() can terminate the session.
    with open(MYSQL_FREEZE_LOCKFILE, 'w') as f:
        f.write(str(freeze_session_pid))
    logger.info('Freeze successful')
def is_dir_mounted(abs_path):
    """Return True when *abs_path* appears as a mount target in `mount` output."""
    grep_cmd = "mount |grep 'on {0} type'".format(abs_path)
    return bool(run_shell(grep_cmd, False, False))
def purge():
    """Lazily unmount and remove every per-partition directory under MOUNT_DIR.

    Each mountpoint gets up to five `umount -l` attempts; MountingError is
    raised when all attempts were used up for a directory.
    """
    if not os.path.isdir(MOUNT_DIR):
        logger.debug("{0} directory not present, skipping purging".format(MOUNT_DIR))
        return
    logger.debug("Purging mountpoints")
    for entry in os.listdir(MOUNT_DIR):
        logger.debug("Processing mount dir {0}".format(entry))
        mount_path = os.path.join(MOUNT_DIR, entry)
        attempts_left = 5
        while is_dir_mounted(mount_path) and attempts_left > 0:
            run_shell("umount -l {0}".format(mount_path))
            attempts_left -= 1
        try:
            logger.debug("Removing directory {0}".format(mount_path))
            os.rmdir(mount_path)
        except OSError:
            # Best effort: a still-busy or non-empty directory is only logged.
            logger.debug("Directory {0} could not be removed".format(mount_path))
        if attempts_left <= 0:
            raise MountingError
def pre(cpanel_client):
    """Pre-capture hook: purge stale mounts, dump metadata, place '.mp'
    marker files on every real mountpoint, then freeze MySQL.

    :param cpanel_client: CPanelClient used for metadata collection.
    """
    logger.info('Pre-capture data script started.')
    purge()
    collect_meta(cpanel_client)
    logger.info('Placing files with mountpoints...')
    # List mount targets, excluding virtual filesystems and service paths.
    output = run_shell(
        "findmnt -n -l -it \"{0}\" -o TARGET | egrep -v \"{1}\"".format(
            MOUNTED_POINTS_EXCLUDED_FS, MOUNTED_POINTS_EXCLUDED_PATTERN
        )
    )
    for mnt_point in output.decode().splitlines():
        # BUG FIX: skip empty lines -- os.path.join('', '.mp') is the
        # relative path '.mp', which used to leave a stray file in the CWD.
        if not mnt_point:
            continue
        mp_file = os.path.join(mnt_point, '.mp')
        if os.path.isfile(mp_file):
            os.remove(mp_file)
        # The marker file records its own mountpoint path.
        with open(mp_file, 'w') as f:
            f.write(mnt_point)
    if MYSQL_FREEZE:
        freeze_mysql()
def post(cpanel_client):
    """Post-capture hook: terminate the MySQL freeze session and remove
    the lock file and the published metadata file.

    :param cpanel_client: accepted for symmetry with pre(); not used here.
    """
    logger.info('Post-capture data script started')
    # Cleanup mysql freeze
    if os.path.isfile(MYSQL_FREEZE_LOCKFILE):
        with open(MYSQL_FREEZE_LOCKFILE) as pidfile:
            freeze_session_pid = pidfile.readline().strip()
        if freeze_session_pid:
            logger.info('Terminating freeze session. PID is {0}.'.format(freeze_session_pid))
            try:
                # BUG FIX: os.kill() requires an int PID; the string read
                # from the lockfile always raised TypeError here, so the
                # freeze session was never actually killed.
                os.kill(int(freeze_session_pid), signal.SIGKILL)
                logger.info('{0} was killed.'.format(freeze_session_pid))
            except Exception:
                logger.info('Unable to kill {0}.'.format(freeze_session_pid))
        else:
            logger.info('Lock file does not contain pid of mysql freeze session.')
        logger.info('Deleting freeze lock file...')
        os.remove(MYSQL_FREEZE_LOCKFILE)
    # Remove metadata
    try:
        os.remove(TARGET_METADATA_PATH)
        logger.info("Metadata removed.")
    except Exception:
        logger.exception("Failed to remove file with metadata.")
    logger.info('Post-capture data script finished.')
if __name__ == "__main__":
    parser = ArgumentParser(description="Mount attached devices inside VM")
    parser.add_argument('action', choices=['pre', 'post'])
    parser.add_argument("-v", "--verbose", action="store_true", help="enable script debug output")
    parser.add_argument("-tf", "--temp-folder", default=SUPPRESS, help="temporary folder for script actions")
    options = parser.parse_args()
    # BUG FIX: --temp-folder must be applied BEFORE the log file handler is
    # created, and the paths derived from WORK_DIR at import time must be
    # re-derived -- previously the option had no effect on any of them.
    if 'temp_folder' in options:
        WORK_DIR = os.path.normpath(options.temp_folder)
        MYSQL_FREEZE_LOCKFILE = os.path.join(WORK_DIR, 'freeze_mysql.lock')
        MYSQL_OUTPUT_FILE = os.path.join(WORK_DIR, 'freeze_mysql.out')
        LOGFILE = os.path.join(WORK_DIR, 'prepostdatacapture.log')
    stdout_handler = logging.StreamHandler(sys.stdout)
    file_handler = logging.FileHandler(LOGFILE)
    # Same level for the logger and both handlers.
    level = logging.DEBUG if options.verbose else logging.INFO
    logger.setLevel(level)
    stdout_handler.setLevel(level)
    file_handler.setLevel(level)
    logger.addHandler(stdout_handler)
    logger.addHandler(file_handler)
    cpanel_client = CPanelClient()
    if options.action == 'pre':
        pre(cpanel_client)
    elif options.action == 'post':
        post(cpanel_client)
    # else should not be possible due to argparse choices
|