@@ -8029,6 +8029,126 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
80298029 fp.close()
80308030 return True
80318031
def AppendReadInFileWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, uncompress=True, skipchecksum=False, seektoend=False):
    """Read the archive in ``infile`` and return its entries as a list.

    Thin wrapper around ``ReadInFileWithContentToList`` using format
    auto-detection.  The ``extradata``/``jsondata``/``compression*`` and
    ``checksumtype`` arguments are accepted for signature parity with the
    other Append* helpers but are not forwarded to the reader.

    Bug fix: the original body referenced ``uncompress``, ``skipchecksum``
    and ``seektoend`` without defining them, raising ``NameError`` on every
    call.  They are now trailing keyword parameters with safe defaults, so
    existing callers are unaffected.
    """
    # The positional 0, 0, 0, False arguments are presumably seek start/end,
    # a delimiter/offset, and a list-only flag — TODO confirm against the
    # signature of ReadInFileWithContentToList.
    return ReadInFileWithContentToList(infile, "auto", 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
8034+
def AppendReadInMultipleFileWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, fmttype="auto", uncompress=True, skipchecksum=False, seektoend=False):
    """Read a multi-archive input and return its entries as a list.

    Thin wrapper around ``ReadInMultipleFileWithContentToList``.  The
    ``extradata``/``jsondata``/``compression*`` and ``checksumtype``
    arguments are accepted for signature parity with the other Append*
    helpers but are not forwarded to the reader.

    Bug fix: the original body referenced ``fmttype``, ``uncompress``,
    ``skipchecksum`` and ``seektoend`` without defining them, raising
    ``NameError`` on every call.  They are now trailing keyword parameters
    with safe defaults, so existing callers are unaffected.
    """
    # The positional 0, 0, 0, False arguments are presumably seek start/end,
    # a delimiter/offset, and a list-only flag — TODO confirm against the
    # signature of ReadInMultipleFileWithContentToList.
    return ReadInMultipleFileWithContentToList(infile, fmttype, 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
8037+
def AppendReadInMultipleFilesWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, fmttype="auto", uncompress=True, skipchecksum=False, seektoend=False):
    """Read several archive inputs and return their entries as a list.

    Thin wrapper around ``ReadInMultipleFilesWithContentToList``.  The
    ``extradata``/``jsondata``/``compression*`` and ``checksumtype``
    arguments are accepted for signature parity with the other Append*
    helpers but are not forwarded to the reader.

    Bug fix: the original body referenced ``fmttype``, ``uncompress``,
    ``skipchecksum`` and ``seektoend`` without defining them, raising
    ``NameError`` on every call.  They are now trailing keyword parameters
    with safe defaults, so existing callers are unaffected.
    """
    # The positional 0, 0, 0, False arguments are presumably seek start/end,
    # a delimiter/offset, and a list-only flag — TODO confirm against the
    # signature of ReadInMultipleFilesWithContentToList.
    return ReadInMultipleFilesWithContentToList(infile, fmttype, 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
8040+
def _append_fp_flush_and_sync(fp):
    # Best-effort flush-to-disk shared by the append writers; failures on
    # unsyncable targets (pipes, in-memory buffers) are deliberately ignored.
    try:
        fp.flush()
        # NOTE(review): the guard tests for os.sync but then calls os.fsync;
        # this mirrors the pattern used throughout this file, so it is kept
        # as-is for consistency — confirm whether hasattr(os, "fsync") was
        # intended.
        if(hasattr(os, "sync")):
            os.fsync(fp.fileno())
    except (io.UnsupportedOperation, AttributeError, OSError):
        pass

def AppendReadInFileWithContent(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False):
    """Re-encode the archive read from ``infile`` and append its entries to
    the writable file object ``fp``.

    Reads every entry via ``AppendReadInFileWithContentToList`` (decoded with
    ``insaltkey``), writes a fresh archive header, then re-emits each entry's
    header + content + JSON with ``outsaltkey``.

    Returns ``fp`` on success, or ``False`` when ``fp`` is not writable.
    """
    if(not hasattr(fp, "write")):
        return False
    # NOTE(review): checksumtype[3] is passed twice (content and JSON slots);
    # with a five-element checksumtype list, [2]/[3]/[4] may have been
    # intended — confirm before changing.
    GetDirList = AppendReadInFileWithContentToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, insaltkey, verbose)
    numfiles = int(len(GetDirList))
    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, outsaltkey)
    _append_fp_flush_and_sync(fp)
    for curfname in GetDirList:
        tmpoutlist = curfname['fheaders']
        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, outsaltkey)
        _append_fp_flush_and_sync(fp)
    return fp
8064+
def AppendReadInFileWithContentToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False, returnfp=False):
    """Re-encode the archive(s) in ``infiles`` into ``outfile``.

    ``outfile`` may be "-" (write to stdout), None (return the encoded bytes),
    an open file-like object, an upload URL matching
    ``__upload_proto_support__``, or a filesystem path.  Returns the bytes for
    ``outfile is None``, the file object when ``returnfp`` is true, False on a
    PermissionError while opening the path, and True otherwise.
    """
    # Resolve a concrete format spec when given the nested multi-format dict:
    # for a real output path, try to infer the format from the file extension.
    if(IsNestedDict(formatspecs) and fmttype=="auto" and
            (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
        get_in_ext = os.path.splitext(outfile)
        tmpfmt = GetKeyByFormatExtension(get_in_ext[1], formatspecs=__file_format_multi_dict__)
        if(tmpfmt is None and get_in_ext[1]!=""):
            # Double extension (e.g. ".cat.gz"): strip once more and retry.
            get_in_ext = os.path.splitext(get_in_ext[0])
            # NOTE(review): this second lookup passes get_in_ext[0] (the
            # basename) where the first lookup passed get_in_ext[1] (the
            # extension) — confirm whether [1] was intended here.
            tmpfmt = GetKeyByFormatExtension(get_in_ext[0], formatspecs=__file_format_multi_dict__)
        if(tmpfmt is None):
            fmttype = __file_format_default__
            formatspecs = formatspecs[fmttype]
        else:
            fmttype = tmpfmt
            formatspecs = formatspecs[tmpfmt]
    elif(IsNestedDict(formatspecs) and fmttype in formatspecs):
        formatspecs = formatspecs[fmttype]
    elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
        # Unknown explicit format: fall back to the default format.
        fmttype = __file_format_default__
        formatspecs = formatspecs[fmttype]
    # For a real path, normalize it and remove any pre-existing file so the
    # archive is written from scratch (best-effort: unlink errors ignored).
    if(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write")):
        outfile = RemoveWindowsPath(outfile)
        if(os.path.exists(outfile)):
            try:
                os.unlink(outfile)
            except OSError:
                pass
    # Select the write target: temp buffer for stdout/bytes-return/upload,
    # the caller's object when file-like, else open (possibly compressed).
    if(outfile == "-" or outfile is None):
        verbose = False
        fp = MkTempFile()
    elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
        fp = outfile
    elif(re.findall(__upload_proto_support__, outfile)):
        fp = MkTempFile()
    else:
        fbasename = os.path.splitext(outfile)[0]
        fextname = os.path.splitext(outfile)[1]
        # A compression-implying extension forces whole-file compression.
        if(not compresswholefile and fextname in outextlistwd):
            compresswholefile = True
        try:
            fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
        except PermissionError:
            return False
    AppendReadInFileWithContent(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, insaltkey, outsaltkey, verbose)
    # For stdout / bytes-return / caller-supplied objects, apply stream
    # compression after the content has been written, then flush to disk
    # (best-effort; unsyncable targets are ignored).
    if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
        fp = CompressOpenFileAlt(
            fp, compression, compressionlevel, compressionuselist, formatspecs)
        try:
            fp.flush()
            # NOTE(review): guard tests os.sync but calls os.fsync — kept for
            # consistency with the rest of the file; confirm intent.
            if(hasattr(os, "sync")):
                os.fsync(fp.fileno())
        except (io.UnsupportedOperation, AttributeError, OSError):
            pass
    # Deliver the result according to the outfile kind.
    if(outfile == "-"):
        fp.seek(0, 0)
        shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
    elif(outfile is None):
        fp.seek(0, 0)
        outvar = fp.read()
        fp.close()
        return outvar
    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
        # Upload URL: compress the temp buffer and push it to the remote.
        fp = CompressOpenFileAlt(
            fp, compression, compressionlevel, compressionuselist, formatspecs)
        fp.seek(0, 0)
        upload_file_to_internet_file(fp, outfile)
    if(returnfp):
        fp.seek(0, 0)
        return fp
    else:
        fp.close()
        return True
8136+
def AppendReadInFileWithContentToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False, returnfp=False):
    """Stack several input archives into one output archive.

    Each element of ``infiles`` (a single value is treated as a one-item
    list) is appended in turn via ``AppendReadInFileWithContentToOutFile``;
    the returned file object is fed back in as the target for the next
    input.  Returns the open file object when ``returnfp`` is true, True on
    success otherwise, and a falsy value if any step fails.
    """
    source_list = infiles if isinstance(infiles, list) else [infiles]
    current_out = False
    for one_input in source_list:
        current_out = AppendReadInFileWithContentToOutFile(one_input, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, insaltkey, outsaltkey, verbose, True)
        if not current_out:
            break
        # Chain: the next input appends onto the file object just produced.
        outfile = current_out
    if current_out and not returnfp:
        current_out.close()
        return True
    return current_out
8151+
80328152def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
80338153 if(IsNestedDict(formatspecs) and fmttype=="auto" and
80348154 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
0 commit comments