Python os module: makedirs() example source code

The following 50 code examples, extracted from open-source Python projects, illustrate how to use os.makedirs().
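Before the project examples, here is a minimal sketch (not taken from any of the projects below) of the two idempotent patterns that recur throughout them: the EAFP form that tolerates an already-existing directory by checking errno.EEXIST, and the Python 3.2+ shortcut exist_ok=True.

import errno
import os

def ensure_dir(path):
    """Create `path` (and any missing parents); do nothing if it already exists."""
    try:
        os.makedirs(path)
    except OSError as exc:
        # Re-raise unless the directory already exists (e.g. created by another process).
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise

# On Python 3.2+ the same behaviour is available as a single call:
# os.makedirs(path, exist_ok=True)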

Project: charm-plumgrid-gateway    Author: openstack
def sync_helpers(include, src, dest, options=None):
    if not os.path.isdir(dest):
        os.makedirs(dest)

    global_options = parse_sync_options(options)

    for inc in include:
        if isinstance(inc, str):
            inc, opts = extract_options(inc, global_options)
            sync(src, dest, inc, opts)
        elif isinstance(inc, dict):
            # could also do nested dicts here.
            for k, v in six.iteritems(inc):
                if isinstance(v, list):
                    for m in v:
                        inc, opts = extract_options(m, global_options)
                        sync(src, dest, '%s.%s' % (k, inc), opts)
Project: charm-plumgrid-gateway    Author: openstack
def mkdir(path, owner='root', group='root', perms=0o555, force=False):
    """Create a directory"""
    log("Making dir {} {}:{} {:o}".format(path, owner, group,
                                          perms))
    uid = pwd.getpwnam(owner).pw_uid
    gid = grp.getgrnam(group).gr_gid
    realpath = os.path.abspath(path)
    path_exists = os.path.exists(realpath)
    if path_exists and force:
        if not os.path.isdir(realpath):
            log("Removing non-directory file {} prior to mkdir()".format(path))
            os.unlink(realpath)
            os.makedirs(realpath, perms)
    elif not path_exists:
        os.makedirs(realpath, perms)
    os.chown(realpath, uid, gid)
    os.chmod(realpath, perms)
Project: safetyculture-sdk-python    Author: SafetyCulture
def create_directory_if_not_exists(logger, path):
    """
    Creates 'path' if it does not exist

    If creation fails, an exception will be thrown

    :param logger:  the logger
    :param path:    the path to ensure it exists
    """
    try:
        os.makedirs(path)
    except OSError as ex:
        if ex.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            log_critical_error(logger, ex, 'An error happened trying to create ' + path)
            raise
Project: safetyculture-sdk-python    Author: SafetyCulture
def save_exported_media_to_file(logger, export_dir, media_file, filename, extension):
    """
    Write exported media item to disk at specified location with specified file name.
    Any existing file with the same name will be overwritten.
    :param logger:      the logger
    :param export_dir:  path to directory for exports
    :param media_file:  media file to write to disc
    :param filename:    filename to give exported image
    :param extension:   extension to give exported image
    """
    if not os.path.exists(export_dir):
        logger.info("Creating directory at {0} for media files.".format(export_dir))
        os.makedirs(export_dir)
    file_path = os.path.join(export_dir, filename + '.' + extension)
    if os.path.isfile(file_path):
        logger.info('Overwriting existing report at ' + file_path)
    try:
        with open(file_path, 'wb') as out_file:
            shutil.copyfileobj(media_file.raw, out_file)
        del media_file
    except Exception as ex:
        log_critical_error(logger, ex, 'Exception while writing ' + file_path + ' to file')
Project: nanostat    Author: wdecoster
def main():
    args = get_args()
    if not os.path.exists(args.outdir):
        os.makedirs(args.outdir)
    sources = [args.fastq, args.bam, args.summary]
    sourcename = ["fastq", "bam", "summary"]
    datadf = nanoget.get_input(
        source=[n for n, s in zip(sourcename, sources) if s][0],
        files=[f for f in sources if f][0],
        threads=args.threads,
        readtype=args.readtype,
        combine="track")
    if args.name:
        output = args.name
    else:
        output = os.path.join(args.outdir, args.prefix + "NanoStats.txt")
    write_stats([datadf], output)
Project: safetyculture-sdk-python    Author: SafetyCulture
def create_directory_if_not_exists(self, path):
        """
        Creates 'path' if it does not exist

        If creation fails, an exception will be thrown

        :param path:    the path to ensure it exists
        """
        try:
            os.makedirs(path)
        except OSError as ex:
            if ex.errno == errno.EEXIST and os.path.isdir(path):
                pass
            else:
                self.log_critical_error(ex, 'An error happened trying to create ' + path)
                raise
Project: Causality    Author: vcla
def downloadExamples(examples,connType,conn=False):
    print("===========")
    print("DOWNLOADING")
    print("===========")
    try:
        os.makedirs(kResultStorageFolder)
    except OSError:
        if not os.path.isdir(kResultStorageFolder):
            raise
    leaveconn = True
    if not conn:
        leaveconn = False
        conn = getDB(connType)
    for example in examples:
        print("---------\nEXAMPLE: {}\n-------".format(example))
        example, room = example.rsplit('_',1)
        getExampleFromDB(example, connType, conn)
    if not leaveconn:
        conn.close()
Project: python-    Author: secondtonone1
def workthread(item, user_agent,path):
    strurl = 'http://yxpjw.club'+item[0]
    picname = item[1]
    print('Downloading %s...........................\n' % (picname))
    req = request.Request(strurl)
    req.add_header('User-Agent',user_agent)
    response = request.urlopen(req)
    content = response.read().decode('gbk')
    strurl2 = re.search(r'^(.*)/',strurl).group(0)
    print('https headers...............%s'%(strurl2))
    #destname = os.path.join(path,picname+'.txt')
    #with open(destname, 'w',encoding='gbk') as file:
        #file.write(content)
    destdir = os.path.join(path,picname)
    os.makedirs(destdir)
    page = 1
    while(1):
        content = getpagedata(content,destdir,page,strurl2)
        if not content:
            break
        page = page + 1
    print('%s download finished\n' % (picname))
Project: alfred-mpd    Author: deanishe
def save_to_path(self, filepath):
        """Save retrieved data to file at ``filepath``.

        .. versionadded: 1.9.6

        :param filepath: Path to save retrieved data.

        """
        filepath = os.path.abspath(filepath)
        dirname = os.path.dirname(filepath)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        self.stream = True

        with open(filepath, 'wb') as fileobj:
            for data in self.iter_content():
                fileobj.write(data)
Project: safetyculture-sdk-python    Author: SafetyCulture
def create_directory_if_not_exists(logger, path):
    """
    Creates 'path' if it does not exist

    If creation fails, an exception will be thrown

    :param logger:  the logger
    :param path:    the path to ensure it exists
    """
    try:
        os.makedirs(path)
    except OSError as ex:
        if ex.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            log_critical_error(logger, ex, 'An error happened trying to create ' + path)
            raise
Project: python-    Author: secondtonone1
def get(self, resource):
        """
        Get a resource into the cache,

        :param resource: A :class:`Resource` instance.
        :return: The pathname of the resource in the cache.
        """
        prefix, path = resource.finder.get_cache_info(resource)
        if prefix is None:
            result = path
        else:
            result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
            dirname = os.path.dirname(result)
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            if not os.path.exists(result):
                stale = True
            else:
                stale = self.is_stale(resource, path)
            if stale:
                # write the bytes of the resource to the cache location
                with open(result, 'wb') as f:
                    f.write(resource.bytes)
        return result
Project: PyWallet    Author: AndreMiras
def delete_account(self, account):
        """
        Deletes the given `account` from the `keystore_dir` directory.
        Then deletes it from the `AccountsService` account manager instance.
        In fact, moves it to another location; another directory at the same
        level.
        """
        app = self.app
        keystore_dir = app.services.accounts.keystore_dir
        deleted_keystore_dir = PyWalib.deleted_account_dir(keystore_dir)
        # create the deleted account dir if required
        if not os.path.exists(deleted_keystore_dir):
            os.makedirs(deleted_keystore_dir)
        # "removes" it from the file system
        account_filename = os.path.basename(account.path)
        deleted_account_path = os.path.join(
            deleted_keystore_dir, account_filename)
        shutil.move(account.path, deleted_account_path)
        # deletes it from the `AccountsService` account manager instance
        account_service = self.get_account_list()
        account_service.accounts.remove(account)
Project: PyWallet    Author: AndreMiras
def test_delete_account_already_exists(self):
        """
        If the destination (backup/trash) directory where the account is moved
        already exists, it should be handled gracefully.
        This could happen if the account gets deleted, then reimported and
        deleted again, refs:
        https://github.com/AndreMiras/PyWallet/issues/88
        """
        pywalib = self.pywalib
        account = self.helper_new_account()
        # creates a file in the backup/trash folder that would conflict
        # with the deleted account
        deleted_keystore_dir = PyWalib.deleted_account_dir(self.keystore_dir)
        os.makedirs(deleted_keystore_dir)
        account_filename = os.path.basename(account.path)
        deleted_account_path = os.path.join(
            deleted_keystore_dir, account_filename)
        # create that file
        open(deleted_account_path, 'a').close()
        # then deletes the account and verifies it worked
        self.assertEqual(len(pywalib.get_account_list()), 1)
        pywalib.delete_account(account)
        self.assertEqual(len(pywalib.get_account_list()), 0)
Project: treecat    Author: posterior
def generate_dataset_file(num_rows, num_cols, num_cats=4, rate=1.0):
    """Generate a random dataset.

    Returns:
        The path to a gzipped pickled data table.
    """
    path = os.path.join(DATA, '{}-{}-{}-{:0.1f}.dataset.pkz'.format(
        num_rows, num_cols, num_cats, rate))
    if os.path.exists(path):
        return path
    print('Generating {}'.format(path))
    if not os.path.exists(DATA):
        os.makedirs(DATA)
    dataset = generate_dataset(num_rows, num_cols, num_cats, rate)
    pickle_dump(dataset, path)
    return path
Project: treecat    Author: posterior
def generate_model_file(num_rows, num_cols, num_cats=4, rate=1.0):
    """Generate a random model.

    Returns:
        The path to a gzipped pickled model.
    """
    path = os.path.join(DATA, '{}-{}-{}-{:0.1f}.model.pkz'.format(
        num_rows, num_cols, num_cats, rate))
    V = num_cols
    K = V * (V - 1) // 2
    if os.path.exists(path):
        return path
    print('Generating {}'.format(path))
    if not os.path.exists(DATA):
        os.makedirs(DATA)
    dataset_path = generate_dataset_file(num_rows, num_cols, num_cats, rate)
    dataset = pickle_load(dataset_path)
    table = dataset['table']
    tree_prior = np.zeros(K, dtype=np.float32)
    config = make_config(learning_init_epochs=5)
    model = train_model(table, tree_prior, config)
    pickle_dump(model, path)
    return path
Project: stalker_pyramid    Author: eoyilmaz
def create_default_project(cls, path, name='DefaultProject'):
        """Creates default maya project structure along with a suitable
        workspace.mel file.

        :param str path: The path at which the default project structure will be
          created.

        :return:
        """
        project_path = os.path.join(path, name)

        # lets create the structure
        for dir_name in cls.default_project_structure.split('\n'):
            dir_path = os.path.join(project_path, dir_name)
            try:
                os.makedirs(dir_path)
            except OSError:
                pass

        # create the workspace.mel
        workspace_mel_path = os.path.join(project_path, 'workspace.mel')
        with open(workspace_mel_path, 'w+') as f:
            f.writelines(cls.default_workspace_content)

        return project_path
Project: my-first-blog    Author: AnkurBegining
def get(self, resource):
        """
        Get a resource into the cache,

        :param resource: A :class:`Resource` instance.
        :return: The pathname of the resource in the cache.
        """
        prefix, path = resource.finder.get_cache_info(resource)
        if prefix is None:
            result = path
        else:
            result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
            dirname = os.path.dirname(result)
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            if not os.path.exists(result):
                stale = True
            else:
                stale = self.is_stale(resource, path)
            if stale:
                # write the bytes of the resource to the cache location
                with open(result, 'wb') as f:
                    f.write(resource.bytes)
        return result
Project: polyaxon-cli    Author: polyaxon
def get_config_file_path(cls):
        if not cls.IS_GLOBAL:
            # local to this directory
            base_path = os.path.join('.')
        else:
            base_path = os.path.expanduser('~')
            if not os.access(base_path, os.W_OK):
                base_path = '/tmp'

            base_path = os.path.join(base_path, '.polyaxon')

            if not os.path.exists(base_path):
                try:
                    os.makedirs(base_path)
                except OSError:
                    # Except permission denied and potential race conditions
                    # in multi-threaded environments.
                    logger.error('Could not create config directory `{}`'.format(base_path))

        return os.path.join(base_path, cls.CONFIG_FILE_NAME)
Project: OldMunkiPackages    Author: aysiu
def trash_old_stuff(trashlist, trashpath, newpath):
    if isinstance(trashlist, list):
        for old_location in trashlist:
            # Get the subfolders needed to be created
            path_within_destination=os.path.relpath(old_location, trashpath)
            # Create what will be the destination path
            new_location=os.path.join(newpath, path_within_destination)
            # Make sure all the relevant subfolders exist in the destination
            if not os.path.exists(os.path.dirname(new_location)):
                os.makedirs(os.path.dirname(new_location))
            # Even though we've been double-checking paths all along, let's just make one last check
            if os.path.exists(old_location) and os.path.isdir(newpath):
                os.rename(old_location, new_location)
                logging.info("Moving %s to %s\n" % (old_location, new_location))
            else:
                logging.error("One of %s or %s does not exist\n" % (old_location, new_location))
    else:
        logging.error("%s is not a valid list\n" % trashlist)

# Function that checks paths are writable
Project: charm-swift-proxy    Author: openstack
def mkdir(path, owner='root', group='root', perms=0o555, force=False):
    """Create a directory"""
    log("Making dir {} {}:{} {:o}".format(path, owner, group,
                                          perms))
    uid = pwd.getpwnam(owner).pw_uid
    gid = grp.getgrnam(group).gr_gid
    realpath = os.path.abspath(path)
    path_exists = os.path.exists(realpath)
    if path_exists and force:
        if not os.path.isdir(realpath):
            log("Removing non-directory file {} prior to mkdir()".format(path))
            os.unlink(realpath)
            os.makedirs(realpath, perms)
    elif not path_exists:
        os.makedirs(realpath, perms)
    os.chown(realpath, uid, gid)
    os.chmod(realpath, perms)
Project: charm-swift-proxy    Author: openstack
def mkdir(path, owner='root', group='root', perms=0o555, force=False):
    """Create a directory"""
    log("Making dir {} {}:{} {:o}".format(path, owner, group,
                                          perms))
    uid = pwd.getpwnam(owner).pw_uid
    gid = grp.getgrnam(group).gr_gid
    realpath = os.path.abspath(path)
    path_exists = os.path.exists(realpath)
    if path_exists and force:
        if not os.path.isdir(realpath):
            log("Removing non-directory file {} prior to mkdir()".format(path))
            os.unlink(realpath)
            os.makedirs(realpath, perms)
    elif not path_exists:
        os.makedirs(realpath, perms)
    os.chown(realpath, uid, gid)
    os.chmod(realpath, perms)
Project: BioDownloader    Author: biomadeira
def download_sifts_from_ebi(identifier, override=False):
    """
    Downloads a SIFTS xml from the EBI FTP to the filesystem.

    :param identifier: (str) PDB ID
    :param override: (boolean)
    :return: (side effects)
    """

    filename = "{}.xml.gz".format(identifier)
    outputfile = os.path.join(config.db_root, config.db_sifts, filename)
    os.makedirs(os.path.join(config.db_root, config.db_sifts), exist_ok=True)

    url_root = config.ftp_sifts
    url_endpoint = "{}.xml.gz".format(identifier)
    url = url_root + url_endpoint
    Downloader(url=url, outputfile=outputfile,
               decompress=True, override=override)
    return
Project: BioDownloader    Author: biomadeira
def download_data_from_uniprot(identifier, file_format="fasta", override=False):
    """
    Downloads a UniProt fasta, gff or txt to the filesystem.

    :param identifier: (str) UniProt ID
    :param file_format: (str) endpoint
    :param override: (boolean)
    :return: (side effects)
    """

    file_format = file_format.lstrip('.')
    if file_format in ['txt', 'fasta', 'gff']:
        filename = "{}.{}".format(identifier, file_format)
        outputfile = os.path.join(config.db_root, config.db_uniprot, filename)
        os.makedirs(os.path.join(config.db_root, config.db_uniprot), exist_ok=True)

        url_root = config.http_uniprot
        url_endpoint = "{}.{}".format(identifier, file_format)
        url = url_root + url_endpoint
        Downloader(url=url, outputfile=outputfile,
                   decompress=True, override=override)
    else:
        raise ValueError("File format {} is not currently implemented..."
                         "".format(file_format))
    return
Project: BioDownloader    Author: biomadeira
def download_alignment_from_pfam(identifier, alignment_size="seed",
                                 override=False):
    """
    Downloads a MSA in Stockholm format from Pfam to the filesystem.

    :param identifier: (str) PFam ID
    :param alignment_size: (str) either "seed" or "full"
    :param override: (boolean)
    :return: (side effects)
    """

    filename = "{}.sth".format(identifier)
    outputfile = os.path.join(config.db_root, config.db_pfam, filename)
    os.makedirs(os.path.join(config.db_root, config.db_pfam), exist_ok=True)

    url_root = config.http_pfam
    url_endpoint = ("family/{}/alignment/{}"
                    "".format(identifier, alignment_size))
    url = url_root + url_endpoint
    Downloader(url=url, outputfile=outputfile,
               decompress=True, override=override)
    return
Project: human-rl    Author: gsastry
def predict_episodes(self, model, episode_paths, n=None, out_dir=None, prefix="model/"):
        if n is not None:
            episode_paths = np.random.choice(episode_paths, n, replace=False)
        if out_dir is not None:
            os.makedirs(out_dir, exist_ok=True)
        for ep, episode_path in enumerate(episode_paths):
            episode = frame.load_episode(episode_path)
            features = self.load_features_episode(episode)
            prediction = model.predict_proba(features)
            for i in range(len(prediction)):
                episode.frames[i].info[prefix + "score"] = prediction[i]
                episode.frames[i].info[prefix + "label"] = model.apply_threshold(prediction[i])
            out_path = episode_path
            if out_dir is not None:
                out_path = os.path.join(out_dir, "{}.pkl.gz".format(ep))
            frame.save_episode(out_path, episode)
Project: human-rl    Author: gsastry
def predict_episodes(self, model, episode_paths, n=None, out_dir=None, prefix="model/"):
        if n is not None:
            episode_paths = np.random.choice(episode_paths, n, replace=False)
        if out_dir is not None:
            os.makedirs(out_dir, exist_ok=True)
        for ep, episode_path in enumerate(episode_paths):
            episode = frame.load_episode(episode_path)
            features = self.load_features_episode(episode)
            prediction = model.predict_proba(features)
            for i in range(len(prediction)):
                episode.frames[i].info[prefix + "score"] = prediction[i]
                episode.frames[i].info[prefix + "label"] = model.apply_threshold(prediction[i])
            out_path = episode_path
            if out_dir is not None:
                out_path = os.path.join(out_dir, "{}.pkl.gz".format(ep))
            frame.save_episode(out_path, episode)
Project: human-rl    Author: gsastry
def predict_episodes(self, model, episode_paths, n=None, out_dir=None, prefix="model/"):
        if n is not None:
            episode_paths = np.random.choice(episode_paths, n, replace=False)
        if out_dir is not None:
            os.makedirs(out_dir, exist_ok=True)
        for ep, episode_path in enumerate(episode_paths):
            episode = frame.load_episode(episode_path)
            features = self.load_features_episode(episode)
            prediction = model.predict_proba(features)
            for i in range(len(prediction)):
                episode.frames[i].info[prefix + "score"] = prediction[i]
                episode.frames[i].info[prefix + "label"] = model.apply_threshold(prediction[i])
            out_path = episode_path
            if out_dir is not None:
                out_path = os.path.join(out_dir, "{}.pkl.gz".format(ep))
            frame.save_episode(out_path, episode)
Project: human-rl    Author: gsastry
def convert_episode_to_tf_records(base_directory, new_directory, dataloader, path):
    episode = frame.load_episode(path)
    features, labels = dataloader.load_features_and_labels_episode(episode)
    assert path.rfind(base_directory) > -1
    new_path = path[path.rfind(base_directory) + len(base_directory) + 1:]
    new_path = os.path.splitext(new_path)[0]
    new_path = os.path.splitext(new_path)[0]
    new_path = os.path.join(new_directory, new_path + ".tfrecord")
    options = tf.python_io.TFRecordOptions(
        compression_type=tf.python_io.TFRecordCompressionType.GZIP)
    os.makedirs(new_path, exist_ok=True)
    for i, f in enumerate(episode.frames):
        writer = tf.python_io.TFRecordWriter(
            os.path.join(new_path, "{}.tfrecord".format(i)), options=options)
        example = tf.train.Example(features=tf.train.Features(feature={
            'action': _int64_feature([f.action]),
            'label': _int64_feature([f.label] if f.label is not None else []),
            'observation': _float_feature(f.observation.reshape(-1)),
            'observation_shape': _int64_feature(f.observation.shape),
            'image': _bytes_feature([f.image.tobytes()]),
            'image_shape': _int64_feature(f.image.shape),
        }))
        writer.write(example.SerializeToString())
        writer.close()
    return new_path
Project: human-rl    Author: gsastry
def predict_episodes(self, model, episode_paths, n=None, out_dir=None, prefix="model/"):
        if n is not None:
            episode_paths = np.random.choice(episode_paths, n, replace=False)
        if out_dir is not None:
            os.makedirs(out_dir, exist_ok=True)
        for ep, episode_path in enumerate(episode_paths):
            episode = frame.load_episode(episode_path)
            features = self.load_features_episode(episode)
            prediction = model.predict_proba(features)
            for i in range(len(prediction)):
                episode.frames[i].info[prefix + "score"] = prediction[i]
                episode.frames[i].info[prefix + "label"] = model.apply_threshold(prediction[i])
            out_path = episode_path
            if out_dir is not None:
                out_path = os.path.join(out_dir, "{}.pkl.gz".format(ep))
            frame.save_episode(out_path, episode)
Project: seq2seq    Author: google
def main():
  """Main function"""

  if ARGS.type == "copy":
    generate_fn = make_copy
  elif ARGS.type == "reverse":
    generate_fn = make_reverse

  # Generate dataset
  examples = list(generate_fn(ARGS.num_examples, ARGS.min_len, ARGS.max_len))
  try:
    os.makedirs(ARGS.output_dir)
  except OSError:
    if not os.path.isdir(ARGS.output_dir):
      raise

  # Write train data
  train_sources, train_targets = zip(*examples)
  write_parallel_text(train_sources, train_targets, ARGS.output_dir)
Project: zipline-chinese    Author: zhanghan1990
def add_security_data(adds, deletes):
    if not hasattr(security_list, 'using_copy'):
        raise Exception('add_security_data must be used within '
                        'security_list_copy context')
    directory = os.path.join(
        security_list.SECURITY_LISTS_DIR,
        "leveraged_etf_list/20150127/20150125"
    )
    if not os.path.exists(directory):
        os.makedirs(directory)
    del_path = os.path.join(directory, "delete")
    with open(del_path, 'w') as f:
        for sym in deletes:
            f.write(sym)
            f.write('\n')
    add_path = os.path.join(directory, "add")
    with open(add_path, 'w') as f:
        for sym in adds:
            f.write(sym)
            f.write('\n')
Project: zipline-chinese    Author: zhanghan1990
def write_state_to_disk(cls, state, emission_rate=None):
    state_dir = cls.__module__ + '.' + cls.__name__

    full_dir = base_state_dir + '/' + state_dir

    if not os.path.exists(full_dir):
        os.makedirs(full_dir)

    if emission_rate is not None:
        name = 'State_Version_' + emission_rate + \
            str(state['obj_state'][VERSION_LABEL])
    else:
        name = 'State_Version_' + str(state['obj_state'][VERSION_LABEL])

    full_path = full_dir + '/' + name

    f = open(full_path, 'w')

    pickle.dump(state, f)

    f.close()
Project: okta-awscli    Author: jmhale
def write_sts_token(self, profile, access_key_id, secret_access_key, session_token):
        """ Writes STS auth information to credentials file """
        region = 'us-east-1'
        output = 'json'
        if not os.path.exists(self.creds_dir):
            os.makedirs(self.creds_dir)
        config = RawConfigParser()

        if os.path.isfile(self.creds_file):
            config.read(self.creds_file)

        if not config.has_section(profile):
            config.add_section(profile)

        config.set(profile, 'output', output)
        config.set(profile, 'region', region)
        config.set(profile, 'aws_access_key_id', access_key_id)
        config.set(profile, 'aws_secret_access_key', secret_access_key)
        config.set(profile, 'aws_session_token', session_token)

        with open(self.creds_file, 'w+') as configfile:
            config.write(configfile)
        print("Temporary credentials written to profile: %s" % profile)
        print("Invoke using: aws --profile %s <service> <command>" % profile)
Project: cellranger    Author: 10XGenomics
def write_stage_alerts(stage, path, alerts_file="alerts.list"):
    alerts = load_alerts()
    out_file = os.path.join(path, alerts_file)
    if not os.path.exists(path):
        os.makedirs(path)
    out_handle = open(out_file, "w")
    keys = ["metric", "threshold", "compare", "action", "message"]
    if not alerts.has_key(stage):
        martian.throw("No alerts found for stage %s" % stage)
    for alert in alerts[stage]:
        out_handle.write("#\n")
        out_handle.write(alert["metric"]+"\n")
        out_handle.write(str(alert["threshold"])+"\n")
        out_handle.write(alert["compare"]+"\n")
        out_handle.write(alert["action"]+"\n")
        out_handle.write(alert["message"]+"\n") 
    out_handle.close()
Project: acbs    Author: AOSC-Dev
def init_env(tree=['default']):
    dump_loc = '/var/cache/acbs/tarballs/'
    tmp_loc = '/var/cache/acbs/build/'
    print("----- Welcome to ACBS - %s -----" % (acbs_version))
    try:
        if not os.path.isdir(dump_loc):
            os.makedirs(dump_loc)
        if not os.path.isdir(tmp_loc):
            os.makedirs(tmp_loc)
    except:
        raise IOError('\033[93mFailed to make work directory\033[0m!')
    if os.path.exists('/etc/acbs_forest.conf'):
        tree_loc = parse_acbs_conf(tree[0])
        if tree_loc is not None:
            os.chdir(tree_loc)
        else:
            sys.exit(1)
    else:
        if not write_acbs_conf():
            sys.exit(1)
    return
Project: yt-browser    Author: juanfgs
def __init__(self):
        self.config_dir = os.path.expanduser("~/.config/ytbrowser/")
        self.defaults['format'] = "mkv"
        self.defaults['quality'] = "bestvideo"
        self.defaults['preferredcodec'] = "mp3"
        self.defaults['preferredquality'] = 192
        self.defaults['developerKey'] = "AIzaSyDFuK00HWV0fd1VMb17R8GghRVf_iQx9uk"
        self.defaults['apiServiceName'] = "youtube"
        self.defaults['apiVersion'] = "v3"

        if not os.path.exists(self.config_dir):
            os.makedirs(self.config_dir)

        if not os.path.exists(self.config_dir + "config.yml"):
            open(self.config_dir + "config.yml", "a").close()

        with open(self.config_dir + "config.yml", 'r') as ymlfile:
                self.user_settings = yaml.load(ymlfile)

        if self.user_settings is None:
            self.user_settings = {}
Project: pycos    Author: pgiri
def dest_path(self, path):
        path = os.path.normpath(path)
        if not path.startswith(self.__dest_path_prefix):
            path = os.path.join(self.__dest_path_prefix,
                                os.path.splitdrive(path)[1].lstrip(os.sep))
        try:
            os.makedirs(path)
        except OSError as exc:
            if exc.errno != errno.EEXIST:
                raise
        self.__dest_path = path
Project: pycos    Author: pgiri
def dest_path(self, path):
        path = os.path.normpath(path)
        if not path.startswith(self.__dest_path_prefix):
            path = os.path.join(self.__dest_path_prefix,
                                os.path.splitdrive(path)[1].lstrip(os.sep))
        try:
            os.makedirs(path)
        except OSError as exc:
            if exc.errno != errno.EEXIST:
                raise
        self.__dest_path = path
Project: Cat-Segmentation    Author: ardamavi
def get_dataset(dataset_path='Data/Train_Data'):
    # Getting all data from data path:
    try:
        X = np.load('Data/npy_train_data/X.npy')
        Y = np.load('Data/npy_train_data/Y.npy')
    except:
        inputs_path = dataset_path+'/input'
        images = listdir(inputs_path) # Geting images
        X = []
        Y = []
        for img in images:
            img_path = inputs_path+'/'+img

            x_img = get_img(img_path).astype('float32').reshape(64, 64, 3)
            x_img /= 255.

            y_img = get_img(img_path.replace('input/', 'mask/mask_')).astype('float32').reshape(64, 64, 1)
            y_img /= 255.

            X.append(x_img)
            Y.append(y_img)
        X = np.array(X)
        Y = np.array(Y)
        # Create dateset:
        if not os.path.exists('Data/npy_train_data/'):
            os.makedirs('Data/npy_train_data/')
        np.save('Data/npy_train_data/X.npy', X)
        np.save('Data/npy_train_data/Y.npy', Y)
    X, X_test, Y, Y_test = train_test_split(X, Y, test_size=0.1, random_state=42)
    return X, X_test, Y, Y_test
Project: Cat-Segmentation    Author: ardamavi
def train_model(model, X, X_test, Y, Y_test):
    if not os.path.exists('Data/Checkpoints/'):
        os.makedirs('Data/Checkpoints/')
    checkpoints = []
    checkpoints.append(ModelCheckpoint('Data/Checkpoints/best_weights.h5', monitor='val_loss', verbose=0, save_best_only=True, save_weights_only=True, mode='auto', period=1))
    checkpoints.append(TensorBoard(log_dir='Data/Checkpoints/./logs', histogram_freq=0, write_graph=True, write_images=False, embeddings_freq=0, embeddings_layer_names=None, embeddings_metadata=None))

    model.fit(X, Y, batch_size=batch_size, epochs=epochs, validation_data=(X_test, Y_test), shuffle=True, callbacks=checkpoints)

    return model
Project: Cat-Segmentation    Author: ardamavi
def save_model(model):
    if not os.path.exists('Data/Model/'):
        os.makedirs('Data/Model/')
    model_json = model.to_json()
    with open("Data/Model/model.json", "w") as model_file:
        model_file.write(model_json)
    # serialize weights to HDF5
    model.save_weights("Data/Model/weights.h5")
    print('Model and weights saved')
    return
Project: charm-plumgrid-gateway    Author: openstack
def sync_pyfile(src, dest):
    src = src + '.py'
    src_dir = os.path.dirname(src)
    logging.info('Syncing pyfile: %s -> %s.' % (src, dest))
    if not os.path.exists(dest):
        os.makedirs(dest)
    shutil.copy(src, dest)
    if os.path.isfile(os.path.join(src_dir, '__init__.py')):
        shutil.copy(os.path.join(src_dir, '__init__.py'),
                    dest)
    ensure_init(dest)
Project: AutoML5    Author: djajetic
def __init__(self, basename, input_dir, verbose=False, replace_missing=True, filter_features=False):
        '''Constructor'''
        self.use_pickle = False # Turn this to true to save data as pickle (inefficient)
        self.basename = basename
        if basename in input_dir:
            self.input_dir = input_dir 
        else:
            self.input_dir = input_dir + "/" + basename + "/"   
        if self.use_pickle:
            if os.path.exists ("tmp"):
                self.tmp_dir = "tmp"
            elif os.path.exists ("../tmp"):
                self.tmp_dir = "../tmp" 
            else:
                os.makedirs("tmp")
                self.tmp_dir = "tmp"
        info_file = os.path.join (self.input_dir, basename + '_public.info')
        self.info = {}
        self.getInfo (info_file)
        self.feat_type = self.loadType (os.path.join(self.input_dir, basename + '_feat.type'), verbose=verbose)
        self.data = {}  
        Xtr = self.loadData (os.path.join(self.input_dir, basename + '_train.data'), verbose=verbose, replace_missing=replace_missing)
        Ytr = self.loadLabel (os.path.join(self.input_dir, basename + '_train.solution'), verbose=verbose)
        Xva = self.loadData (os.path.join(self.input_dir, basename + '_valid.data'), verbose=verbose, replace_missing=replace_missing)
        Xte = self.loadData (os.path.join(self.input_dir, basename + '_test.data'), verbose=verbose, replace_missing=replace_missing)
        # Normally, feature selection should be done as part of a pipeline.
        # However, here we do it as a preprocessing step for efficiency reasons.
        idx=[]
        if filter_features: # add hoc feature selection, for the example...
            fn = min(Xtr.shape[1], 1000)       
            idx = data_converter.tp_filter(Xtr, Ytr, feat_num=fn, verbose=verbose)
            Xtr = Xtr[:,idx]
            Xva = Xva[:,idx]
            Xte = Xte[:,idx]  
        self.feat_idx = np.array(idx).ravel()
        self.data['X_train'] = Xtr
        self.data['Y_train'] = Ytr
        self.data['X_valid'] = Xva
        self.data['X_test'] = Xte
Project: AutoML5    Author: djajetic
def mkdir(d):
    if not os.path.exists(d):
        os.makedirs(d)
Project: AutoML5    Author: djajetic
def mkdir(d):
    ''' Create a new directory'''
    if not os.path.exists(d):
        os.makedirs(d)
Project: hesperides-cli    Author: voyages-sncf-technologies
def _create_home(self):
        if not os.path.isdir(self._HOME + '/' + self._CONFIG_DIR):
            os.makedirs(self._HOME + '/' + self._CONFIG_DIR)
        with os.fdopen(os.open(self._HOME + '/' + self._CONFIG_DIR + '/' + self._CONFIG_FILE_NAME,
                               os.O_WRONLY | os.O_CREAT, 0o600), 'w'):
            pass
        with os.fdopen(os.open(self._HOME + '/' + self._CONFIG_DIR + '/' + self._CREDENTIALS_FILE_NAME,
                               os.O_WRONLY | os.O_CREAT, 0o600), 'w'):
            pass
Project: Stitch    Author: nathanlopez
def gen_makeself(conf_dir,alias):
    mkself_tmp = os.path.join(conf_dir,'tmp')
    conf_mkself = os.path.join(conf_dir,'Installers')
    if not os.path.exists(conf_mkself):
        os.makedirs(conf_mkself)
    if not os.path.exists(mkself_tmp):
        os.makedirs(mkself_tmp)
    if sys.platform.startswith('darwin'):
        alias_app = os.path.join(conf_dir,'{}.app'.format(alias))
        if os.path.exists(alias_app):
            run_command('cp -R {} {}'.format(alias_app,mkself_tmp))
            gen_osx_plist(alias,mkself_tmp)
            gen_st_setup(alias,mkself_tmp)
            mkself_installer = 'bash "{}" "{}" "{}/{}_Installer" "Stitch" bash st_setup.sh'.format(mkself_exe, mkself_tmp, conf_mkself,alias)
            st_log.info(mkself_installer)
            st_log.info(run_command(mkself_installer))
            shutil.rmtree(mkself_tmp)
    else:
        binry_dir = os.path.join(conf_dir,'Binaries')
        alias_dir = os.path.join(binry_dir, alias)
        if os.path.exists(alias_dir):
            run_command('cp -R {} {}'.format(alias_dir,mkself_tmp))
            gen_lnx_daemon(alias,mkself_tmp)
            gen_st_setup(alias,mkself_tmp)
            mkself_installer = 'bash "{}" "{}" "{}/{}_Installer" "Stitch" bash st_setup.sh'.format(mkself_exe, mkself_tmp, conf_mkself,alias)
            st_log.info(mkself_installer)
            st_log.info(run_command(mkself_installer))
            shutil.rmtree(mkself_tmp)
Project: lang-reps    Author: chaitanyamalaviya
def save(self, path):
        if not os.path.exists(path): os.makedirs(path)
        self.src_vocab.save(path+"/vocab.src")
        self.tgt_vocab.save(path+"/vocab.tgt")
        self.m.save(path+"/params")
        with open(path+"/args", "w") as f: pickle.dump(self.args, f)
Project: lang-reps    Author: chaitanyamalaviya
def write_combined_file(lang_code, all_lang_paths, all_en_paths):
  src_combined_filename = "train" + "_" + lang_code + "_en."+ lang_code + ".txt"
  tgt_combined_filename = "train" + "_" + lang_code + "_en.en" + ".txt"

  if not os.path.exists(output_dir + lang_code):
    os.makedirs(output_dir+lang_code)

  write_lang = []
  write_en = []

  for corp in all_lang_paths:
    for filename in corp:
      with open(filename) as f:
        doc = f.read()
        write_lang.append(doc)

  for corp in all_en_paths:
    for filename in corp:
      with open(filename) as f:
        doc = f.read()
        write_en.append(doc)

  for doc1, doc2 in zip(write_lang, write_en):
    if len(doc1.split("\n"))!=len(doc2.split("\n")):
      continue
    else:
      with open(output_dir + lang_code + "/" + src_combined_filename, 'a') as wf:
        wf.write(doc1)
      with open(output_dir + lang_code + "/" + tgt_combined_filename, 'a') as wf:
        wf.write(doc2)
Project: flora    Author: Lamden
def new_dir(self, path):
        try:
            os.makedirs(path)
        except Exception as e:
            raise e