AlkantarClanX12
Current Path : /opt/cloudlinux/venv/lib/python3.11/site-packages/clselect/ |
Current File : //opt/cloudlinux/venv/lib/python3.11/site-packages/clselect/clpassenger.py |
# -*- coding: utf-8 -*- # Copyright © Cloud Linux GmbH & Cloud Linux Software, Inc 2010-2019 All Rights Reserved # # Licensed under CLOUD LINUX LICENSE AGREEMENT # http://cloudlinux.com/docs/LICENSE.TXT from __future__ import absolute_import from __future__ import print_function from __future__ import division import fcntl import pwd import syslog from datetime import datetime from future.utils import iteritems from future.moves import configparser as ConfigParser import io import logging import os import re import subprocess from clcommon import clcaptain, utils from clcommon.cpapi import userdomains from clcommon.utils import get_file_system_in_which_file_is_stored_on from clcommon.utils import get_file_lines, write_file_lines from clcommon.utils import mod_makedirs from clquota import QuotaWrapper, NoSuchUserException, InsufficientPrivilegesException, IncorrectLimitFormatException, \ GeneralException, NoSuchPackageException, QuotaDisabledException from lveapi import PyLve, PyLveError from secureio import set_user_perm, set_root_perm from typing import Dict, Union # NOQA from .clselectexcept import ClSelectExcept from .utils import file_readlines, file_write, s_partition from .utils import get_abs_rel, mkdir_p, file_read, file_writelines from .utils import get_using_realpath_keys, realpaths_are_equal # logger for clpassenger module logger = logging.getLogger(__name__) logger.setLevel(logging.ERROR) # disable output of logs to console null_handler = logging.StreamHandler(open('/dev/null', 'w')) logger.addHandler(null_handler) HTACCESS_BEGIN = '# DO NOT REMOVE. CLOUDLINUX PASSENGER CONFIGURATION BEGIN' HTACCESS_END = '# DO NOT REMOVE. CLOUDLINUX PASSENGER CONFIGURATION END' RACK_PATH = 'config.ru' RACK_TEMPLATE = r'''app = proc do |env| message = "It works!\n" version = "Ruby %s\n" % RUBY_VERSION response = [message, version].join("\n") [200, {"Content-Type" => "text/plain"}, [response]] end run app ''' RESTART_PATH = 'tmp/restart.txt' WSGI_PATH = 'passenger_wsgi.py' WSGI_TEMPLATE = r'''import os import sys sys.path.insert(0, os.path.dirname(__file__)) def application(environ, start_response): start_response('200 OK', [('Content-Type', 'text/plain')]) message = 'It works!\n' version = 'Python %s\n' % sys.version.split()[0] response = '\n'.join([message, version]) return [response.encode()] ''' APPJS_PATH = 'app.js' APPJS_TEMPLATE = r'''var http = require('http'); var server = http.createServer(function(req, res) { res.writeHead(200, {'Content-Type': 'text/plain'}); var message = 'It works!\n', version = 'NodeJS ' + process.versions.node + '\n', response = [message, version].join('\n'); res.end(response); }); server.listen(); ''' def drop_root_perm(user): userpwd = pwd.getpwnam(user) set_user_perm(userpwd.pw_uid, userpwd.pw_gid, exit = False) def get_config_lock(config_path, mode): try: conf_file = open(config_path, mode, errors='surrogateescape') fcntl.flock(conf_file.fileno(), fcntl.LOCK_EX) return conf_file except IOError: return None def release_lock(lock_file): try: lock_file.close() except: pass def write_config(user, config_path, config): """ Write config with locking. Drop permissions if method called as root. """ permissions_dropped = False # we must drop & restore permissions only when we # call this method as root, otherwise we can get unexpected # behavior when we drop permission on higher level, # and restore them here, in deep tree of method calls if os.getegid() == 0 or os.geteuid() == 0: drop_root_perm(user) permissions_dropped = True config_file = None try: check_and_createdir(config_path) config_file = get_config_lock(config_path, 'r') file_content = io.StringIO() config.write(file_content) clcaptain.write(config_path, file_content.getvalue()) # clcaptain raises exception from clcommon.utils.ExternalProgramFailes, IOError # exception ClSelectExcept.ExternalProgramFailed has the same name but different except (IOError, OSError, ClSelectExcept.UnableToSaveData, utils.ExternalProgramFailed) as e: syslog.syslog(syslog.LOG_WARNING, "Can't write {}: {}".format(config_path, e)) finally: release_lock(config_file) if permissions_dropped: set_root_perm(exit=False) def check_and_createdir(path): user_backup_path = os.path.dirname(path) if not os.path.isdir(user_backup_path): try: clcaptain.mkdir(user_backup_path) # clcaptain raises exception from clcommon.utils.ExternalProgramFailes # exception ClSelectExcept.ExternalProgramFailed has the same name but different except (OSError, ClSelectExcept.ExternalProgramFailed, utils.ExternalProgramFailed) as e: raise ClSelectExcept.UnableToSaveData(user_backup_path, e) def get_htaccess_cache_path(user): userpwd = pwd.getpwnam(user) return os.path.join(userpwd.pw_dir, '.cl.selector', 'htaccess_cache') def _get_info_about_htaccess_cache_file(path_to_file): # type: (str) -> Dict """ Get info (stat, first n symbols and file system in which file is stored) about htaccess_cache file """ time_format = '%Y-%m-%d %H:%M:%S' number_of_symbols = 100 file_info = {} if os.path.exists(path_to_file): try: file_stat = os.stat(path_to_file) file_info['file_size'] = file_stat.st_size file_info['gid'] = file_stat.st_gid file_info['uid'] = file_stat.st_uid file_info['permissions'] = oct(file_stat.st_mode) file_info['last_access'] = datetime.fromtimestamp(file_stat.st_atime).strftime(time_format) file_info['last_modification'] = datetime.fromtimestamp(file_stat.st_mtime).strftime(time_format) # Not necessary to read file and get file system if it's empty if file_info['file_size'] == 0: return file_info try: with open(path_to_file, 'r') as f: file_info['first_symbols'] = f.read(number_of_symbols) # move to n symbol from end file f.seek(-number_of_symbols, 2) file_info['last_symbols'] = f.read(number_of_symbols) except (OSError, IOError) as err: file_info['error'] = 'We cannot get first and last %s symbols from "%s" file. Exception: %s' % ( number_of_symbols, path_to_file, err, ) file_info['file_system'] = get_file_system_in_which_file_is_stored_on(path_to_file)['details'] except (OSError, IOError) as err: file_info['error'] = 'We cannot get info about "%s" file. Exception: %s' % ( path_to_file, err, ) return file_info def _get_user_lve_limits(user_uid): # type: (int) -> Union[Dict[int, int, int, int, int, int, int], Dict[str]] """ Getting user lve limits for logging those for next debug """ result = dict() try: py_lve = PyLve() py_lve.initialize() user_limits = py_lve.lve_info(user_uid) result['cpu'] = user_limits.ls_cpu / user_limits.ls_cpu_weight result['pmem'] = user_limits.ls_memory_phy result['vmem'] = user_limits.ls_memory result['io'] = user_limits.ls_io result['iops'] = user_limits.ls_iops result['ep'] = user_limits.ls_enters result['nproc'] = user_limits.ls_nproc except PyLveError as err: result['error'] = 'We cannot get lve limits for user with uid "%s". Exception: %s' % ( user_uid, err, ) return result def _get_user_quota_limits(user_uid): # type: (int) -> Union[Dict[str, str, str], Dict[str]] """ Getting user quota limits for logging those for next debug """ result = dict() user_uid = str(user_uid) try: quota_wrapper = QuotaWrapper() user_quotas = quota_wrapper.get_user_limits(user_uid)[user_uid] result = user_quotas except ( NoSuchUserException, NoSuchPackageException, InsufficientPrivilegesException, GeneralException, IncorrectLimitFormatException, QuotaDisabledException, IOError, OSError ) as err: result['error'] = 'We cannot get quota limits for user with uid "%s". Exception: %s' % ( user_uid, err, ) return result def _log_debug_info_about_user_and_config_file(user, config_path, error): # type: (str, str, Exception) -> None """ Logging info (lve & quota limits) about user and info (stat info, first & last n symbols) about config file """ file_info = _get_info_about_htaccess_cache_file(config_path) debug_info = dict() debug_info['config_file_info'] = file_info debug_info['user_info'] = dict() try: user_uid = pwd.getpwnam(user).pw_uid except KeyError as err: debug_info['user_info']['error'] = 'User "%s" does not exists. Exception: %s' % ( user, err, ) user_uid = None if user_uid is not None: debug_info['user_info']['lve_limits'] = dict() debug_info['user_info']['lve_limits'].update(_get_user_lve_limits(user_uid)) debug_info['user_info']['quota_limits'] = dict() debug_info['user_info']['quota_limits'].update(_get_user_quota_limits(user_uid)) logger.exception(error, exc_info=True, extra=debug_info) def read_config(user): config = ConfigParser.RawConfigParser(strict=False) config_path = get_htaccess_cache_path(user) config_file = get_config_lock(config_path, 'r') if config_file is not None: try: config.readfp(config_file) # LU-1035 except (IOError, OSError) as err: # Logging additional information for next debug _log_debug_info_about_user_and_config_file(user, config_path, err) # LU-1032 except (ConfigParser.ParsingError, ConfigParser.MissingSectionHeaderError): _unlink(config_path) syslog.syslog(syslog.LOG_WARNING, "Config {} is broken.".format(config_path)) # if cought ParsingError - return Empty config config = ConfigParser.RawConfigParser(strict=False) finally: release_lock(config_file) return config, config_path def get_htaccess_cache(user, doc_root): config, _ = read_config(user) if config.has_section(doc_root): try: htaccess_list = config.get(doc_root, 'htaccess_list').split(',') return htaccess_list except ConfigParser.NoOptionError: return None return None def write_htaccess_cache(user, doc_root, data): data = data.split('\n') data = list(filter(bool, data)) config, config_path = read_config(user) if not config.has_section(doc_root): config.add_section(doc_root) config.set(doc_root, 'htaccess_list', ','.join(data)) write_config(user, config_path, config) def update_htaccess_cache(user, path_to_file, doc_root): config, config_path = read_config(user) if config.has_section(doc_root): htaccess_list = config.get(doc_root, 'htaccess_list').split(',') else: config.add_section(doc_root) config.set(doc_root, 'htaccess_list', '') htaccess_list = [] if path_to_file not in htaccess_list: htaccess_list.append(path_to_file) htaccess_list = list(filter(bool, htaccess_list)) config.set(doc_root, 'htaccess_list', ','.join(htaccess_list)) write_config(user, config_path, config) def remove_passenger_lines_from_htaccess(htaccess_filename): """ Removes clpassenger lines from .htaccess to stop application :param htaccess_filename: Application .htaccess path :return: None """ lines = file_readlines(htaccess_filename, errors='surrogateescape') new_lines = [] in_config = False for line in lines: if line.startswith(HTACCESS_BEGIN): in_config = True if line.startswith(HTACCESS_END): in_config = False continue if not in_config: new_lines.append(line) # write new .htaccess new_lines = rm_double_empty_lines(new_lines) file_writelines(htaccess_filename, new_lines, 'w', errors='surrogateescape') def configure(user, directory, alias, interpreter, binary, populate=True, action=None, doc_root=None, startup_file=APPJS_PATH, passenger_log_file=None): """ Configure passenger application :param user: name of unix user :param directory: name of dir in user home :param alias: alias of application :param interpreter: interpreter which execute application :param binary: binary of interpreter that execute application :param populate: True if application have to be be populated :param action: action with apllication. can be transit or None :param doc_root: doc_root :param startup_file: start application file :param passenger_log_file: Passenger log filename to write to app's .htaccess :return: None """ abs_dir, _ = get_abs_rel(user, directory) if os.path.exists(abs_dir) and not os.path.isdir(abs_dir): raise ClSelectExcept.WebAppError( 'Destination exists and it is not a directory') if interpreter not in ('python', 'ruby', 'nodejs'): raise ClSelectExcept.InterpreterError( "Unsupported interpreter ('%s')" % interpreter) user_summary = summary(user) try: app_summary = get_using_realpath_keys(user, directory, user_summary) except KeyError: if doc_root is None: raise ClSelectExcept.NoSuchApplication( 'No such application (or application not configured) "%s"' % directory) else: if action != 'transit': exists_dir = app_summary['directory'] raise ClSelectExcept.WebAppError("Specified directory already used by '%s'" % exists_dir) if not doc_root: doc_root = app_summary['docroot'] # Alias, which is empty, means that user passed uri equaled to doc root # and we don't want to normalize the alias, because normalized empty # alias is point and that alias doesn't work in htaccess if alias != '': alias = os.path.normpath(alias) abs_alias, _ = get_abs_rel(user, os.path.join(doc_root, alias)) htaccess = os.path.join(abs_alias, '.htaccess') htaccess_needs_update = True if os.path.exists(htaccess): htaccess_raw = file_read(htaccess, errors='surrogateescape') if HTACCESS_BEGIN in htaccess_raw: for item in user_summary.values(): # The condition allows to detect common part of aliases # For details see commit message item_alias = os.path.normpath(item['alias']) + os.sep if os.path.dirname(os.path.commonprefix([item_alias, alias + os.sep])) != '': exists_dir = item['directory'] if exists_dir != abs_dir: raise ClSelectExcept.WebAppError( "Specified alias is already used by the other " "application: '%s'. Please, specify another application url." % exists_dir) else: # Do not write to .htaccess, it is already correct htaccess_needs_update = False lines = htaccess_raw.splitlines() else: lines = [] if htaccess_needs_update: lines.append('') lines.append(HTACCESS_BEGIN) lines.append('PassengerAppRoot "%s"' % abs_dir) lines.append('PassengerBaseURI "/%s"' % alias) lines.append('Passenger%s "%s"' % (interpreter.title(), binary)) # for some reason autodetect of `app.js` is not working if interpreter == 'nodejs': lines.append('PassengerAppType node') lines.append('PassengerStartupFile %s' % startup_file) # append PassengerAppLogFile directive if need if passenger_log_file and interpreter in ('python', 'nodejs'): lines.append('PassengerAppLogFile "%s"' % passenger_log_file) lines.append(HTACCESS_END) lines = rm_double_empty_lines(lines) mkdir_p(abs_alias) file_writelines(htaccess, ('%s\n' % line for line in lines), errors='surrogateescape') update_htaccess_cache(user, htaccess, doc_root) if populate: # Also creates startup_file populate_app(user, directory, interpreter, startup_file=startup_file) def fix_homedir(user): for domain_alias, data in iteritems(_summary(user)): _, alias = domain_alias old_home = os.path.commonprefix((data['directory'], data['binary'])) _, _, directory = s_partition(data['directory'], old_home) # old python selector has binary as file # and get_abs_rel does realpath() # while new selector binary is symlink # and realpath works wrong binary_dir = os.path.dirname(data['binary']) binary_name = os.path.basename(data['binary']) _, _, _binary = s_partition(binary_dir, old_home) binary = os.path.join(get_abs_rel(user, _binary)[0], binary_name) htaccess_path = data['htaccess'] _unconfigure(htaccess_path) configure(user, directory, alias, data['interpreter'], binary, doc_root=data['docroot']) def move(user, directory, old_alias, new_alias, old_doc_root=None, new_doc_root=None): app_data = get_using_realpath_keys(user, directory, summary(user)) old_doc_root = old_doc_root or app_data['docroot'] new_doc_root = new_doc_root or old_doc_root old_abs_alias = os.path.join(old_doc_root, old_alias) old_htaccess = os.path.join(old_abs_alias, '.htaccess') new_abs_alias = os.path.join(new_doc_root, new_alias) new_htaccess = os.path.join(new_abs_alias, '.htaccess') if not realpaths_are_equal(user, old_htaccess, new_htaccess): _unconfigure(old_htaccess) lines = file_readlines(old_htaccess, errors='surrogateescape') open(old_htaccess, 'w').close() file_writelines(new_htaccess, lines, 'a', errors='surrogateescape') update_htaccess_cache(user, new_htaccess, new_doc_root) def purge(user): for directory in summary(user): unconfigure(user, directory) def populate_app(user, directory, interpreter, startup_file=APPJS_PATH): """ Populate application :param user: name of unix user :param directory: application path in user's home :param interpreter: interpreter which run application :param startup_file: main application file :return: None """ abs_dir, rel_dir = get_abs_rel(user, directory) app_public = os.path.join(abs_dir, 'public') app_tmp = os.path.join(abs_dir, 'tmp') mkdir_p(app_public) mkdir_p(app_tmp) app_configru = os.path.join(abs_dir, RACK_PATH) app_wsgi = os.path.join(abs_dir, WSGI_PATH) app_js = os.path.join(abs_dir, startup_file) configru_installed = os.path.isfile(app_configru) wsgi_installed = os.path.isfile(app_wsgi) appjs_installed = os.path.isfile(app_js) if configru_installed: configru_unchanged = file_read(app_configru) == RACK_TEMPLATE if wsgi_installed: wsgi_unchanged = file_read(app_wsgi) == WSGI_TEMPLATE if appjs_installed: appjs_unchanged = file_read(app_js) == APPJS_TEMPLATE if interpreter == 'python': if not wsgi_installed: file_write(app_wsgi, WSGI_TEMPLATE) if configru_installed and configru_unchanged: _unlink(app_configru) _unlink(app_js) elif interpreter == 'ruby': if not configru_installed: file_write(app_configru, RACK_TEMPLATE, 'w') if wsgi_installed and wsgi_unchanged: _unlink(app_wsgi) _unlink(app_js) elif interpreter == 'nodejs': if not appjs_installed: # add ability to specify startup path # like 'not/existing/subdir/app.js' dir_path = os.path.dirname(app_js) if not os.path.isdir(dir_path): mod_makedirs(dir_path, 0o755) file_write(app_js, APPJS_TEMPLATE) if appjs_installed and appjs_unchanged: _unlink(app_configru) _unlink(app_wsgi) restart(user, directory) def _unlink(path): try: os.unlink(path) except OSError: pass def _find_htaccess_files(doc_root): p = subprocess.Popen(['/bin/find', doc_root, '-name', '.htaccess'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) # Process each line as bytes and attempt decoding to UTF-8 clean_lines = [] for line in p.stdout: try: decoded_line = line.decode('utf-8') clean_lines.append(decoded_line.strip()) except UnicodeDecodeError: # Skip lines that cannot be decoded to UTF-8 continue return '\n'.join(clean_lines) def _summary(user, userdomains_data=None): # TODO PTCLLIB-132 # it's using cache info about users domains # this mechanism should be removed after implementing of caching on DA for userdomains cpapi method domain_docroot_pairs = userdomains(user) if userdomains_data is None else userdomains_data domain_alias_docroot = [] for domain, doc_root in domain_docroot_pairs: if doc_root is None: continue htaccess_cache = get_htaccess_cache(user, doc_root) if not htaccess_cache: stdoutdata = _find_htaccess_files(doc_root) write_htaccess_cache(user, doc_root, stdoutdata) htaccess_cache = get_htaccess_cache(user, doc_root) if htaccess_cache is None: # if write_htaccess_cache was unsuccessful, we would still get None here continue for ht_path in htaccess_cache: if ht_path: alias = os.path.dirname(ht_path) domain_alias_docroot.append((domain, alias, doc_root)) return _htaccess_summary(user, domain_alias_docroot) def _htaccess_summary(user, domain_alias_docroot): summ = {} for domain, alias, doc_root in domain_alias_docroot: htaccess = os.path.join(alias, '.htaccess') try: htaccess_raw = file_read(htaccess, errors='surrogateescape') except (IOError, OSError): continue approot = re.search( '^PassengerAppRoot\s+"?(?P<directory>.+?)"?$', htaccess_raw, re.MULTILINE) if not approot: continue interpreter = re.search( '^Passenger(?P<interpreter>Python|Ruby|Nodejs)\s+' '"?(?P<binary>.+?)"?$', htaccess_raw, re.MULTILINE) if not interpreter: continue alias_abs, _ = get_abs_rel(user, alias) doc_root_abs, _ = get_abs_rel(user, doc_root) _, _, alias = s_partition(alias_abs, doc_root_abs) alias = alias.lstrip(os.sep) domain_alias = (domain, alias,) # detect what alias belongs domain appuri = re.search('^PassengerBaseURI\s+"?(?P<appuri>.+?)"?$', htaccess_raw, re.MULTILINE) if appuri and not compare_aliases(appuri.groupdict()['appuri'], alias): continue summ[domain_alias] = { 'htaccess': htaccess, 'domain': domain, 'docroot': doc_root, 'directory': approot.groupdict()['directory'], 'interpreter': interpreter.groupdict()['interpreter'].lower(), 'binary': interpreter.groupdict()['binary'], } return summ def compare_aliases(alias1, alias2): return os.path.normpath(alias1.strip('/')) == os.path.normpath(alias2.strip('/')) #FIXME: Need join/rewrite "summary" and "_summary" functions def summary(user, userdomains_data=None): summ_result = {} for domain_alias, value in iteritems(_summary(user, userdomains_data=userdomains_data)): domain, alias = domain_alias app_root = value['directory'] try: _, directory = get_abs_rel(user, app_root) except ClSelectExcept.WrongData: syslog.syslog( syslog.LOG_WARNING, '{} is broken, directory {} is not in user\'s home.'.format( os.path.join(alias, '.htaccess'), app_root )) continue value['alias'] = alias try: app_summary = get_using_realpath_keys(user, directory, summ_result) except KeyError: value['domains'] = [domain] summ_result[directory] = value else: # add domains key if directory has multiple domains if 'domains' not in app_summary: app_summary['domains'] = [] else: app_summary['domains'].append(domain) return summ_result def unconfigure(user, directory): app_data = get_using_realpath_keys(user, directory, summary(user)) htaccess = app_data['htaccess'] _unconfigure(htaccess) def _unconfigure(htaccess): htaccess_raw = file_read(htaccess, errors='surrogateescape') lines = htaccess_raw.splitlines() new_lines = [] in_config = False for line in lines: if line == HTACCESS_BEGIN: in_config = True continue if line == HTACCESS_END: in_config = False continue if in_config: continue new_lines.append(line) lines = rm_double_empty_lines(new_lines) file_writelines(htaccess, ('%s\n' % line for line in lines), 'w', errors='surrogateescape') def iter_path(root, sub): for p in sub.split(os.sep): root = os.path.join(root, p) yield root def restart(user, directory): abs_dir, _ = get_abs_rel(user, directory) if not os.path.exists(abs_dir): raise ClSelectExcept.MissingApprootDirectory("Missing directory %(abs_dir)s" % {'abs_dir': abs_dir}) tmp_dir = os.path.join(abs_dir, 'tmp') if not os.path.exists(tmp_dir): os.mkdir(tmp_dir) app_restart = os.path.join(abs_dir, RESTART_PATH) # imitation system 'touch' if os.path.exists(app_restart): os.utime(app_restart, None) else: open(app_restart, 'a').close() def rm_double_empty_lines(lines): _lines = [] empty_line = True for line in lines: if line.strip(): empty_line = False elif empty_line: continue else: empty_line = True _lines.append(line) if empty_line: return _lines[:-1] return _lines