The following 42 code examples, extracted from open-source Python projects, illustrate how to use fabric.contrib.files.exists().
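As a quick orientation before the examples: fabric.contrib.files.exists() checks a path on the remote host over the SSH connection, unlike os.path.exists(), which checks the local filesystem. Here is a minimal sketch of the common create-if-missing pattern, assuming Fabric 1.x; the task name and path are illustrative, not taken from any of the projects below.

from fabric.api import run, sudo, task
from fabric.contrib.files import exists

@task
def ensure_app_dir():
    # exists() tests the path on the *remote* host over SSH;
    # use_sudo=True lets it see paths the login user cannot read.
    if not exists('/srv/app', use_sudo=True):
        sudo('mkdir -p /srv/app')
    run('ls -ld /srv/app')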
def delete_stack_file(stackname):
    try:
        core.describe_stack(stackname) # triggers exception if NOT exists
        LOG.warning('stack %r still exists, refusing to delete stack files. delete active stack first.', stackname)
        return
    except BotoServerError as ex:
        if not ex.message.endswith('does not exist'):
            LOG.exception("unhandled exception attempting to confirm if stack %r exists", stackname)
            raise
    ext_list = [
        ".pem",
        ".pub",
        ".json",
        ".yaml", # yaml files are now deprecated
    ]
    paths = [join(config.STACK_DIR, stackname + ext) for ext in ext_list]
    paths = filter(os.path.exists, paths)

    def _unlink(path):
        os.unlink(path)
        return not os.path.exists(path)
    return dict(zip(paths, map(_unlink, paths)))
def configure_nginx_if_necessary():
    nginx_config_path = os.path.join('/etc/nginx/sites-available', env.domain_name)
    if exists(nginx_config_path):
        print('nginx config found, not creating another one')
    else:
        nginx_config_variables = {
            'source_dir': PROJECT_FOLDER,
            'domain': env.domain_name,
            'ssl_params_path': SSL_PARAMS_PATH,
            'fullchain_path': os.path.join(env.letsencrypt_folder, 'fullchain.pem'),
            'privkey_path': os.path.join(env.letsencrypt_folder, 'privkey.pem'),
            'socket_path': SOCKET_PATH
        }
        upload_template(
            filename='deploy_configs/nginx_config',
            destination=nginx_config_path,
            context=nginx_config_variables,
            use_sudo=True
        )
    nginx_config_alias = os.path.join('/etc/nginx/sites-enabled', env.domain_name)
    sudo('ln -sf %s %s' % (nginx_config_path, nginx_config_alias))
def step_x_setup_ros_for_ubuntu_mate_pi():
    run("echo 'Roughly following http://wiki.ros.org/kinetic/Installation/Ubuntu'")
    _pp("* If you need to do raspi-config stuff, CTRL-C out and do that before running this script")

    # Set up ROS repositories
    if not fabfiles.exists("/etc/apt/sources.list.d/ros-latest.list"):
        sudo("apt-get update")
        sudo("sh -c 'echo \"deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main\" > /etc/apt/sources.list.d/ros-latest.list'")
        sudo("apt-key adv --keyserver hkp://ha.pool.sks-keyservers.net:80 --recv-key 421C365BD9FF1F717815A3895523BAEEB01FA116")
        sudo("apt-get update")
        sudo("apt-get -y upgrade")
    else:
        _fp("ros-latest.list already exists... skipping set up")
        sudo("apt-get update")
        sudo("apt-get -y upgrade")

    sudo("apt-get install -y ros-kinetic-ros-base")
def install_jenkins(*args, **kwargs):
    home = run('echo $HOME')
    version = kwargs.get('version', 'latest')
    init = os.path.join(home, 'init')
    jenkins_base_dir = os.path.join(home, 'jenkins')
    jenkins_init = os.path.join(init, 'jenkins')
    port = kwargs.get('port')
    if not exists(jenkins_base_dir):
        run('mkdir ' + jenkins_base_dir)
    if not exists(os.path.join(jenkins_base_dir, 'jenkins.war')):
        with hide('output'):
            run('wget http://mirrors.jenkins-ci.org/war/%s/jenkins.war -O ~/jenkins/jenkins.war' % version)
    if not exists(os.path.join(jenkins_base_dir, 'org.jenkinsci.main.modules.sshd.SSHD.xml')):
        with hide('output'):
            run('wget https://templates.wservices.ch/jenkins/org.jenkinsci.main.modules.sshd.SSHD.xml -O ~/jenkins/org.jenkinsci.main.modules.sshd.SSHD.xml')
    if not exists(init):
        run('mkdir ~/init')
    if not exists(jenkins_init):
        with hide('output'):
            run('wget https://templates.wservices.ch/jenkins/jenkins.init -O ~/init/jenkins')
        run('chmod 750 ~/init/jenkins')
        sed(jenkins_init, 'PORT=HTTP_PORT', 'PORT=%s' % port)
        run('~/init/jenkins start')
    else:
        run('~/init/jenkins restart')
def get_daemon(initfile):
    """Parse DAEMON variable in the init file"""
    re_daemon = re.compile('^DAEMON=(.+)$', re.MULTILINE)
    if not exists(initfile):
        return None
    local_file = get(initfile)
    f = open(local_file[0], 'r')
    data = f.read()
    f.close()
    match = re_daemon.search(data)
    daemon = ''
    if match:
        daemon = match.group(1)
    if not exists(daemon):
        return None
    return daemon
def load_config(conf_file, base_conf=[], spec_conf=[], delimiter=' '):
    if exists(conf_file):
        with hide('output'):
            config_data = run('cat %s' % conf_file)
    else:
        config_data = ''
    confs = base_conf + spec_conf
    for conf in confs:
        param, value = conf.split(delimiter, 1)
        value = re.sub(r'#.*$', "", str(value))  # Delete comments
        match = re.search('^%s[ ]?%s[ ]?(.*)' % (param, delimiter), config_data, re.MULTILINE)
        if match:
            orig_value = match.group(1).strip()
            orig_line = '%s' % match.group(0).strip()
            if orig_value != str(value):
                if config_data and param in spec_conf:
                    continue  # Do not override already existing specific configurations
                print('%s %s change to %s' % (param, orig_value, value))
                sed(conf_file, orig_line, '%s%s%s' % (param, delimiter, value))
            else:
                print('Config OK: %s%s%s' % (param, delimiter, value))
        else:
            print('Add config %s%s%s' % (param, delimiter, value))
            append(conf_file, '%s%s%s' % (param, delimiter, value))
def install_nginx(*args, **kwargs):
    home = run('echo $HOME')
    nginx_dir = os.path.join(home, 'nginx')
    nginx_port = kwargs.get('port')
    run('mkdir -p %s/conf/sites' % nginx_dir)
    if not exists('%s/conf/nginx.conf' % nginx_dir):
        run('wget https://templates.wservices.ch/nginx/nginx.conf -O %s' % (os.path.join(nginx_dir, 'conf', 'nginx.conf')))
    run('mkdir -p %s/temp' % nginx_dir)
    run('mkdir -p %s/logs' % nginx_dir)
    # Save the port number in a separate file in ~/nginx/conf/port.
    # This port will be used automatically for future installations.
    if nginx_port and not exists(os.path.join(nginx_dir, 'conf', 'port')):
        append(os.path.join(nginx_dir, 'conf', 'port'), str(nginx_port))
    run('mkdir -p ~/init')
    if not exists('~/init/nginx'):
        run('wget https://templates.wservices.ch/nginx/init -O ~/init/nginx')
        run('chmod 750 ~/init/nginx')
    if exists('~/nginx/nginx.pid'):
        run('~/init/nginx restart')
    else:
        run('~/init/nginx start')
def install_lighttpd(*args, **kwargs):
    home = run('echo $HOME')
    lighttpd_dir = os.path.join(home, 'lighttpd')
    lighttpd_port = kwargs.get('port')
    run('mkdir -p %s' % lighttpd_dir)
    run('wget https://templates.wservices.ch/lighttpd/lighttpd.conf -O %s' % (os.path.join(lighttpd_dir, 'lighttpd.conf')))
    run('wget https://templates.wservices.ch/lighttpd/port.conf -O %s' % (os.path.join(lighttpd_dir, 'port.conf')))
    append(os.path.join(lighttpd_dir, 'port.conf'), 'server.port = %s' % lighttpd_port)
    if not exists(os.path.join(lighttpd_dir, 'django.conf')):
        run('wget https://templates.wservices.ch/lighttpd/django.conf -O %s' % (os.path.join(lighttpd_dir, 'django.conf')))
    run('mkdir -p ~/init')
    if not exists('~/init/lighttpd'):
        run('wget https://templates.wservices.ch/lighttpd/init -O ~/init/lighttpd')
        run('chmod 750 ~/init/lighttpd')
    if exists('~/lighttpd/lighttpd.pid'):
        run('~/init/lighttpd restart')
    else:
        run('~/init/lighttpd start')
def deploy():
    run('whoami')
    local('source venv/bin/activate && pip freeze > requirements.txt')
    for host in env.hosts:
        if not exists(REMOTE_DIR):
            run('mkdir /home/ubuntu/data /home/ubuntu/data/app')
        local('rsync -avz . {}:{} --delete --exclude-from \'rsync_exclude.txt\''.format(host, REMOTE_DIR))
        with cd(REMOTE_DIR):
            if not exists('venv'):
                run('virtualenv -p python3 venv')
            run('source venv/bin/activate && pip install -r requirements.txt')
            sudo('cp config/nginx/default /etc/nginx/sites-available/')
            sudo('cp config/uwsgi/uwsgi.ini /etc/uwsgi/apps-available/')
            if not exists('/etc/uwsgi/apps-enabled/uwsgi.ini'):
                sudo('ln -s /etc/uwsgi/apps-available/uwsgi.ini /etc/uwsgi/apps-enabled/uwsgi.ini')
    restart()
def _package_external(directory, package_name, make_clean):
    """Builds packages with mk-build-deps and dpkg-buildpackage.

    Args:
        directory: the path to a repo synced on the VM via vagrant
        package_name: the name of the debian package that will be created
    """
    if env.pkgfmt != "deb":
        print("External packages only support deb, not building.")
        return
    if not exists(directory):
        print('path %s does not exist, cannot package' % directory)
        return
    print('packaging %s as %s' % (directory, package_name))
    run('mkdir -p ~/endaga-packages')
    with cd('/home/vagrant/'):
        with cd(directory):
            run('echo y | sudo mk-build-deps')
            run('sudo gdebi --n %s-build-deps*.deb' % package_name)
            run('rm -f %s-build-deps*.deb' % package_name)
            clean_arg = '' if make_clean == 'yes' else '-nc'
            run('dpkg-buildpackage -b -uc -us %s' % clean_arg)
        run('mv %s_*.deb ~/endaga-packages/.' % package_name)
        run('rm %s_*' % package_name)
def deploy():
    """Deploy application with packaging in mind"""
    version = get_version()
    pip_path = os.path.join(
        REMOTE_PROJECT_LOCATION, version, 'bin', 'pip'
    )
    prepare_release()
    if not exists(REMOTE_PROJECT_LOCATION):
        # it may not exist for initial deployment on fresh host
        run("mkdir -p {}".format(REMOTE_PROJECT_LOCATION))
    with cd(REMOTE_PROJECT_LOCATION):
        # create new virtual environment using venv
        run('python3 -m venv {}'.format(version))
        run("{} install webxample=={} --index-url {}".format(
            pip_path, version, PYPI_URL
        ))
    switch_versions(version)
    # let's assume that Circus is our process supervision tool of choice.
    run('circusctl restart webxample')
def gitrepos(branch=None, fork='sympy'):
    """
    Clone the repo

    fab vagrant prepare (namely, checkout_cache()) must be run first. By
    default, the branch checked out is the same one as the one checked out
    locally. The master branch is not allowed--use a release branch (see the
    README). No naming convention is put on the release branch.

    To test the release, create a branch in your fork, and set the fork
    option.
    """
    with cd("/home/vagrant"):
        if not exists("sympy-cache.git"):
            error("Run fab vagrant prepare first")
    if not branch:
        # Use the current branch (of this git repo, not the one in Vagrant)
        branch = local("git rev-parse --abbrev-ref HEAD", capture=True)
    if branch == "master":
        raise Exception("Cannot release from master")
    run("mkdir -p repos")
    with cd("/home/vagrant/repos"):
        run("git clone --reference ../sympy-cache.git https://github.com/{fork}/sympy.git".format(fork=fork))
    with cd("/home/vagrant/repos/sympy"):
        run("git checkout -t origin/%s" % branch)
def deploy_hooks_into_build_space(target_within_build_space="redcap/hooks/library"):
    """
    Deploy each extension into build space by running its own deploy.sh.
    Lacking a deploy.sh, copy the extension files to the build space.
    For each extension run test.sh if it exists.
    """
    # make sure the target directory exists
    extension_dir_in_build_space = '/'.join([env.builddir, target_within_build_space])
    with settings(warn_only=True):
        if local("test -d %s" % extension_dir_in_build_space).failed:
            local("mkdir -p %s" % extension_dir_in_build_space)
    # For each type of hook, make the target directory and deploy its children
    for feature in os.listdir(env.hooks_deployment_source):
        # make the target directory
        feature_fp_in_src = '/'.join([env.hooks_deployment_source, feature])
        if os.path.isdir(feature_fp_in_src):  # file is a hook type
            feature_fp_in_target = extension_dir_in_build_space
            if not os.path.exists(feature_fp_in_target):
                os.mkdir(feature_fp_in_target)
            deploy_extension_to_build_space(feature_fp_in_src, feature_fp_in_target)
def deploy_plugins_into_build_space(target_within_build_space="/redcap/plugins"):
    """
    Deploy each extension into build space by running its own deploy.sh.
    Lacking a deploy.sh, copy the extension files to the build space.
    For each extension run test.sh if it exists.
    """
    # make sure the target directory exists
    extension_dir_in_build_space = env.builddir + target_within_build_space
    with settings(warn_only=True):
        if local("test -d %s" % extension_dir_in_build_space).failed:
            local("mkdir -p %s" % extension_dir_in_build_space)
    # locate every directory plugins_deployment_source/*
    for (dirpath, dirnames, filenames) in os.walk(env.plugins_deployment_source):
        for dir in dirnames:
            source_dir = '/'.join([dirpath, dir])
            this_target = os.path.join(extension_dir_in_build_space, dir)
            deploy_extension_to_build_space(source_dir, this_target)
def deploy_extension_to_build_space(source_dir="", build_target=""):
    if not os.path.exists(build_target):
        os.mkdir(build_target)
    # run the deployment script
    this_deploy_script = os.path.join(source_dir, 'deploy.sh')
    if os.path.isfile(this_deploy_script):
        local("bash %s %s" % (this_deploy_script, build_target))
    else:
        # copy files to target
        local("cp %s/* %s" % (source_dir, build_target))
    # run test deployment script
    this_test_script = os.path.join(source_dir, 'test.sh')
    if os.path.isfile(this_test_script):
        local("bash %s " % this_test_script)
def deploy_language_to_build_space():
    source_dir = env.languages
    target_dir = env.builddir + "/redcap/languages"
    if re.match(r'^\[.+\]$', source_dir) is not None:
        for language in json.loads(env.languages):
            if os.path.exists(language):
                local("mkdir -p %s" % target_dir)
                local('cp %s %s' % (language, target_dir))
            else:
                abort("the language file %s does not exist" % language)
    elif local('find %s/*.ini -maxdepth 1 -type f | wc -l' % source_dir, capture=True) != '0':
        if os.path.exists(source_dir) and os.path.exists(target_dir):
            local('find %s/*.ini -maxdepth 1 -type f -exec rsync {} %s \;' % (source_dir, target_dir))
    else:
        print("Warning: The language files were not provided. English will be used by default.")
def configure_services():
    """Updates the services' init files (e.g. for gunicorn)"""
    require('code_dir')

    # Ensure at least the default gunicorn.py exists
    with cd(env.code_dir + '/{{ cookiecutter.repo_name }}/settings'):
        if not files.exists('gunicorn.py', use_sudo=True):
            sudo('cp gunicorn.py.example gunicorn.py')

    # Note: DAEMON_TYPE and DAEMON_FILE_EXTENSION are replaced by hammer automatically
    source_dir = os.path.join(
        env.code_dir,
        'deploy',
        '${DAEMON_TYPE}',
        '${SERVICE_NAME}.${DAEMON_FILE_EXTENSION}',
    )

    # Install the services using hammer
    install_services_cp([
        ('gunicorn-{{cookiecutter.repo_name}}', source_dir.replace('${SERVICE_NAME}', 'gunicorn')),
    ])
def deployConfigFiles(templateData, *files):
    """Deploy configuration files, filling template fields with real
    deployment data.

    @param templateData: A C{dict} with the data to fill the templates.
    @param files: A list of C{(source, destination)} pairs describing which
        files to copy.
    """
    serverName = templateData['server-name']
    for origin, destination in files:
        specificFilename = os.path.join('deployment', serverName, origin)
        defaultFilename = os.path.join('deployment', 'default', origin)
        origin = (specificFilename
                  if os.path.exists(specificFilename)
                  else defaultFilename)
        destination = destination.format(**templateData)
        put(origin, destination, use_sudo=True)
        for key, value in templateData.iteritems():
            sed(destination, '\{\{ %s \}\}' % key, value.replace('.', r'\.'), use_sudo=True)
def prepareInstance(username, sshId):
    """Prepare an instance, updating the packages and creating a new user.

    @param username: The name of the new user.
    @param sshId: Path to SSH public key (usually ~/.ssh/id_rsa.pub)
    """
    print os.environ['EC2_KEYPAIR_PATH']
    with settings(user='ubuntu', key_filename=os.environ['EC2_KEYPAIR_PATH']):
        password = getpass('Enter a new password for user %s:' % username)
        password2 = getpass('Enter the password again:')
        if password != password2:
            raise RuntimeError("Passwords don't match")
        sudo('adduser --disabled-password --gecos ",,," %s' % username)
        cryptedPassword = _hashPassword(password)
        sudo('usermod --password %s %s' % (cryptedPassword, username))
        sudo('gpasswd --add %s admin' % username)
        authorizeSshKey(username, sshId)
        sudo('apt-get update')
        sudo('DEBIAN_FRONTEND=noninteractive apt-get dist-upgrade -y')
        if exists('/var/run/reboot-required'):
            reboot()
def _backup_current_release(self):
    """
    [advanced]\t dpkg likes to blow away your old files when you make new
    ones. this is a hack to keep them around
    """
    current_release = self._rpath('releases', self._get_current_release()).rstrip('/')
    should_backup = \
        '.old' not in current_release and \
        not files.exists(current_release + '.old')
    if should_backup:
        sudo('mv %(dir)s %(dir)s.old' % {'dir': current_release},
             user=self._user, group=self._group)
        self._change_symlink_to("%s.old" % current_release)
def update_config(self, content, path):
    old_file = six.BytesIO()
    if files.exists(path, use_sudo=self.sudo):
        fab.get(remote_path=path, local_path=old_file, use_sudo=self.sudo)
    old_content = old_file.getvalue()
    need_update = content != old_content
    if need_update:
        fabricio.move_file(
            path_from=path,
            path_to=path + '.backup',
            sudo=self.sudo,
            ignore_errors=True,
        )
        fab.put(six.BytesIO(content), path, use_sudo=self.sudo, mode='0644')
        fabricio.log('{path} updated'.format(path=path))
    else:
        fabricio.log('{path} not changed'.format(path=path))
    return need_update
def push_code(rev='HEAD', virtualenv=True, requirements=True, cur_date=None):
    if cur_date is None:
        cur_date = run("date +%d.%m.%y-%H:%M:%S")
    local('git archive -o /tmp/api.tar.gz ' + rev)
    put('/tmp/api.tar.gz', '/tmp')
    run('mv api /tmp/latest-api-{}'.format(cur_date))
    run('mkdir api')
    with cd("api"):
        run('tar xzf /tmp/api.tar.gz')
        run('rm -rf env')
        run('cp -r /tmp/latest-api-{}/env env'.format(cur_date))
        if virtualenv:
            if not files.exists('env'):
                run('virtualenv env')
        if requirements:
            with prefix('. env/bin/activate'):
                run('pip install -r requirements.txt')
    run('rm -rf /tmp/api-*')
    run('mv /tmp/latest-api-{} /tmp/api-{}'.format(cur_date, cur_date))
def supervisor():
    """
    1. Create new supervisor config file.
    2. Copy local config to remote config.
    3. Register new command.
    """
    with settings(hide('warnings'), warn_only=True):
        if exists('/etc/supervisor/conf.d/%s.conf' % env.domain):
            sudo('rm /etc/supervisor/conf.d/%s.conf' % env.domain)
    with cd('/etc/supervisor/conf.d'):
        upload_template(
            filename=src(req.parse("suarm"), "suarm/tmpl/django_supervisor.conf"),
            destination='%s.conf' % env.domain,
            context={
                "project_name": env.project,
                "project_path": get_project_src(env.stage),
                "app_user": make_user(env.project),
            },
            use_sudo=True,
        )
def install_uwsgi(self):
    if self.args.force or prompt(red(' * Install Uwsgi service (y/n)?'), default='y') == 'y':
        sudo('pip3 install uwsgi')
        # uwsgi config needs the real env path
        with cd(self.python_env_dir):
            real_env_path = run('pwd')
        # get user
        home_user = run('echo $USER')
        # uwsgi config string
        django_uwsgi_ini = self.django_uwsgi_ini.format(self.nginx_web_dir, self.project, real_env_path, home_user)
        # modify uwsgi config file
        with cd(self.project_dir):
            if not exists('{0}.ini'.format(self.project)):
                run('touch {0}.ini'.format(self.project))
            put(StringIO(django_uwsgi_ini), '{0}.ini'.format(self.project), use_sudo=True)
        print(green(' * Installed Uwsgi service in the system.'))
        print(green(' * Done'))
        print()
def common_config_nginx_ssl(self):
    """
    Convert nginx server from http to https
    """
    if prompt(red(' * Change url from http to https (y/n)?'), default='n') == 'y':
        if not exists(self.nginx_ssl_dir):
            sudo('mkdir -p {0}'.format(self.nginx_ssl_dir))
        # generate a self-signed SSL certificate and key
        sudo('openssl req -x509 -nodes -days 365 -newkey rsa:2048 -keyout {0}/cert.key -out {0}/cert.pem'.format(self.nginx_ssl_dir))
        # update the nginx config
        put(StringIO(self.nginx_web_ssl_config), '/etc/nginx/sites-available/default', use_sudo=True)
        sudo('service nginx restart')
        print(green(' * Switched Nginx from http to https.'))
        print(green(' * Done'))
        print()
def spark_install(self):
    """
    download and install spark
    :return:
    """
    sudo('apt-get -y install build-essential python-dev python-six '
         'python-virtualenv libcurl4-nss-dev libsasl2-dev libsasl2-modules '
         'maven libapr1-dev libsvn-dev zlib1g-dev')
    with cd('/tmp'):
        if not exists('spark.tgz'):
            sudo('wget {0} -O spark.tgz'.format(
                bigdata_conf.spark_download_url
            ))
        sudo('rm -rf spark-*')
        sudo('tar -zxf spark.tgz')
        sudo('rm -rf {0}'.format(bigdata_conf.spark_home))
        sudo('mv spark-* {0}'.format(bigdata_conf.spark_home))
def generate_ssh(self, server, args, configure):
    """
    Generate an SSH key on the server.

    :param server:
    :param args:
    :param configure:
    :return:
    """
    self.reset_server_env(server, configure)

    # chmod project root owner
    sudo('chown {user}:{user} -R {path}'.format(
        user=configure[server]['user'],
        path=bigdata_conf.project_root
    ))

    # generate ssh key
    if not exists('~/.ssh/id_rsa.pub'):
        run('ssh-keygen -t rsa -P "" -f ~/.ssh/id_rsa')
def write_environment_info(stackname, overwrite=False):
    """Looks for /etc/cfn-info.json and writes one if not found.

    Must be called with an active stack connection.

    This gives Salt the outputs available at stack creation, but that were
    not available at template compilation time.
    """
    if not files.exists("/etc/cfn-info.json") or overwrite:
        LOG.info('no cfn outputs found or overwrite=True, writing /etc/cfn-info.json ...')
        infr_config = utils.json_dumps(template_info(stackname))
        return put(StringIO(infr_config), "/etc/cfn-info.json", use_sudo=True)
    LOG.debug('cfn outputs found, skipping')
    return []
def create_app_dir():
    """Create the application directory and setup a virtualenv."""
    # create app dir
    if exists(remote_app_dir) is False:
        sudo('mkdir -p ' + remote_app_dir)
    # create virtual env
    with cd(remote_app_dir):
        if exists(remote_app_dir + '/env') is False:
            sudo('virtualenv env')
    # Change permissions
    sudo('chown {}:{} {} -R'.format(env.user, env.user, remote_app_dir))
    # Create log dir
    if exists(remote_log_dir) is False:
        sudo('mkdir {}'.format(remote_log_dir))
def configure_nginx():
    """Configure nginx.

    Installs nginx config for the application.
    """
    # copy configuration
    with lcd(local_config_dir):
        with cd('/etc/nginx/sites-available'):
            put('./nginx.conf', './{}.conf'.format(app_name), use_sudo=True)
    # enable configuration
    if exists('/etc/nginx/sites-enabled/{}.conf'.format(app_name)) is False:
        sudo('ln -s /etc/nginx/sites-available/{}.conf'.format(app_name) +
             ' /etc/nginx/sites-enabled/{}.conf'.format(app_name))
    # reload configuration
    sudo('service nginx reload')
def fetch_sources_from_repo(branch, code_directory):
    if exists(code_directory):
        print('Removing the following directory: %s' % code_directory)
        sudo('rm -rf %s' % code_directory)
    git_clone_command = 'git clone {1} {2} --branch {0} --single-branch'
    sudo(git_clone_command.format(branch, REPOSITORY_URL, code_directory))
def create_dhparam_if_necessary():
    if exists(DHPARAM_PATH):
        print('dhparam file exists, skipping this step')
        return
    sudo('openssl dhparam -out %s 2048' % DHPARAM_PATH)
def create_ssl_params_if_necessary():
    create_dhparam_if_necessary()
    if exists(SSL_PARAMS_PATH):
        print('Not creating ssl-params.conf, already exists')
        return
    upload_template(
        filename='deploy_configs/ssl_params',
        destination=SSL_PARAMS_PATH,
        context={'dhparam_path': DHPARAM_PATH},
        use_sudo=True
    )
def configure_letsencrypt_if_necessary():
    create_ssl_params_if_necessary()
    env.letsencrypt_folder = os.path.join('/etc/letsencrypt/live', env.domain_name)
    print('Assuming letsencrypt folder is %s' % env.letsencrypt_folder)
    if exists(env.letsencrypt_folder, use_sudo=True):
        print('letsencrypt folder found, skipping letsencrypt setup')
        return
    start_letsencrypt_setup()
def exists(path, remote=True):
    '''
    Check if the path exists on the remote host or locally, depending on
    the `remote` parameter.
    '''
    if remote:
        return files.exists(path)
    return os.path.exists(path)
def _setup_ros_packages_from_git(ros_package_name, git_url, subpackage_list):
    run("echo 'Starting...'")
    home_path = run("pwd")
    git_path = home_path + "/gitspace"
    ros_package_path = git_path + "/" + ros_package_name  # e.g. "/rosbots"
    ws_dir = home_path + WS_DIR
    install_dir = home_path + INSTALL_DIR

    _fp("Do we need to create gitspace folder?")
    if not fabfiles.exists(git_path):
        run("mkdir " + git_path)

    _fp("Do we need to git clone the repo?")
    if not fabfiles.exists(ros_package_path):
        _fp("Did not find " + ros_package_name + " repo, cloning...")
        with cd(git_path):
            run("git clone " + git_url)

        _fp("Creating symbolic link to main ros workspace")
        with cd(ws_dir + "/src"):
            if fabfiles.exists(ros_package_name):
                run("rm " + ros_package_name)
            run("ln -s " + ros_package_path)
    else:
        #_fp("Found the repo, just fetching top and rebasing")
        #with cd(ros_package_path):
        #    run("git fetch origin")
        #    run("git rebase origin/master")
        _pp("Found the repo, not doing anything - feel free to git fetch and rebase manually")

    for subpackage in subpackage_list:
        _fp("Compiling " + subpackage + "...")
        with cd(ws_dir):
            run("./src/catkin/bin/catkin_make_isolated --pkg " + subpackage +
                " --install -DCMAKE_BUILD_TYPE=Release --install-space " +
                install_dir + " -j1")
def _setup_ros_other_packages(rospkg, run_rosdep=True):
    run("echo 'Starting...'")
    home_path = run("pwd")
    ws_dir = home_path + WS_DIR
    if not fabfiles.exists(ws_dir):
        _fp("ROS workspace not found - run the main set up first")
        return

    with cd(ws_dir):
        ts = str(time.time()).split(".")[0]
        fn = "kinetic-custom_" + str(ts) + "_ros.rosinstall"
        run("rosinstall_generator " + rospkg +
            " --rosdistro kinetic --deps --wet-only --tar > " + fn)
        run("cat " + fn)
        _pp("Did rosinstall generator create the install file correctly? "
            "If so, we're going to merge and update the workspace. "
            "(If there are duplicate packages, hit DELETE and REPLACE!)")
        run("wstool merge -y -t src " + fn)
        _pp("Did the wstool merge correctly? If so, we are going to run the update on the install file for the workspace.")
        run("wstool update --delete-changed-uris -t src")
        _pp("Did the wstool update correctly? If so, we are going to update dependencies.")
        if run_rosdep:
            run("rosdep install --from-paths src --ignore-src --rosdistro kinetic -y -r --os=debian:jessie")
        _pp("Did the dependencies update ok? If so, let's compile the new packages.")
        run("./src/catkin/bin/catkin_make_isolated --install -DCMAKE_BUILD_TYPE=Release --install-space " +
            home_path + INSTALL_DIR + " -j1")
def verify_reboot_performed(self, max_wait=60 * 30):
    logger.info("waiting for system to reboot")
    successful_connections = 0
    timeout = time.time() + max_wait
    while time.time() <= timeout:
        try:
            with settings(warn_only=True, abort_exception=FabricFatalException):
                time.sleep(1)
                if exists(self.tfile):
                    logger.debug("temp. file still exists, device hasn't rebooted.")
                    continue
                else:
                    logger.debug("temp. file no longer exists, device has rebooted.")
                    successful_connections += 1
                # try connecting 10 times before returning
                if successful_connections <= 9:
                    continue
                return
        except BaseException:
            logger.debug("system exit was caught, this is probably because SSH connectivity is broken while the system is rebooting")
            continue
    if time.time() > timeout:
        pytest.fail("Device never rebooted!")
def verify_reboot_not_performed(self, wait=60):
    time.sleep(wait)
    assert exists(self.tfile)
def load_db_dump(dump_file):
    """Given a dump in your home dir on the server, load it into the
    server's database, overwriting any existing data. BE CAREFUL!"""
    require('environment')
    if not files.exists("%(home)s/.pgpass" % env):
        abort("Please get a copy of .pgpass and put it in your home dir")
    temp_file = os.path.join(env.home, '%(project)s-%(environment)s.sql' % env)
    put(dump_file, temp_file)
    run('psql -h %s -U %s -d %s -f %s' % (env.db_host, env.db_user, env.db, temp_file))
def make_messages():
    """Extract English text from code and templates, and update the .po
    files for translators to translate."""
    # Make sure gettext is installed
    local("gettext --help >/dev/null 2>&1")
    if os.path.exists("locale/fr/LC_MESSAGES/django.po"):
        local("python manage.py makemessages -a")
    else:
        local("python manage.py makemessages -l fr")
def mkdir_or_backup(appname):
    appfolder = applications + '/' + appname
    if not exists(appfolder):
        sudo('mkdir %s' % appfolder)
        sudo('chown -R www-data:www-data %s' % appfolder)
        backup = None
    else:
        # timestamp the backup with date, hour and minute
        dt = now.strftime('%y-%m-%d-%H-%M')
        backup = '%s.%s.zip' % (appname, dt)
        with cd(applications):
            sudo('zip -r %s %s' % (backup, appname))
    return backup