fix: regression on gzipped filenames from 3dcbee5a

Since 3dcbee5a, the filenames stored inside the .gz archives ended in
.tmp. This fixes the issue by using the gzip.GzipFile class constructor
instead of gzip.open(), so the archived filename can be set explicitly.

Signed-off-by: Leonidas Spyropoulos <artafinde@archlinux.org>
Leonidas Spyropoulos <artafinde@archlinux.org>, 2022-10-31 14:43:31 +00:00
commit 286834bab1 (parent 6ee34ab3cb)
GPG key ID: 59E43E106B247368 (no known key found for this signature in database)

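The root cause: gzip.open(path) records the basename of the path it is given in the archive's FNAME header, so writing through the ".tmp" staging path bakes the temporary name into the archive even after the file is renamed into place. Passing the final name via GzipFile(filename=...) while still writing through the temp-file handle keeps the write-then-rename flow but stores the intended name. A minimal sketch of the two behaviours, using illustrative paths ("packages.gz" / "packages.gz.tmp") rather than the script's real PACKAGES constant:

    import gzip

    # Before the fix: gzip.open() derives the FNAME header from the path it is
    # given, so tools such as `gunzip -N` would restore "packages.gz.tmp".
    with gzip.open("packages.gz.tmp", "wt") as f:
        f.write("some-package\n")

    # After the fix: the compressed bytes still go to the temp file, but the
    # FNAME header comes from filename= (the gzip module strips a trailing
    # ".gz"), so no ".tmp" suffix ends up inside the archive.
    with open("packages.gz.tmp", "wb") as tmp:
        with gzip.GzipFile(filename="packages.gz", mode="wb", fileobj=tmp) as gz:
            gz.write(b"some-package\n")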

@@ -242,8 +242,10 @@ def _main():
     tmp_meta = f"{META}.tmp"
     tmp_metaext = f"{META_EXT}.tmp"
     gzips = {
-        "packages": gzip.open(tmp_packages, "wt"),
-        "meta": gzip.open(tmp_meta, "wb"),
+        "packages": gzip.GzipFile(
+            filename=PACKAGES, mode="wb", fileobj=open(tmp_packages, "wb")
+        ),
+        "meta": gzip.GzipFile(filename=META, mode="wb", fileobj=open(tmp_meta, "wb")),
     }
 
     # Append list opening to the metafile.
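One consequence of this construction that the hunk itself does not show: GzipFile.close() does not close a fileobj that was passed in, so the inline open(tmp_..., "wb") handles are only released when they are closed separately or garbage-collected. A sketch of the close order one would use with this pattern, with a hypothetical staging path:

    import gzip

    raw = open("packages.gz.tmp", "wb")  # hypothetical temp path
    gz = gzip.GzipFile(filename="packages.gz", mode="wb", fileobj=raw)
    gz.write(b"some-package\n")
    gz.close()   # finishes the gzip stream but leaves `raw` open
    raw.close()  # flush and close the temp file before renaming it into place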
@@ -252,7 +254,9 @@ def _main():
     # Produce packages.gz + packages-meta-ext-v1.json.gz
     extended = False
     if len(sys.argv) > 1 and sys.argv[1] in EXTENDED_FIELD_HANDLERS:
-        gzips["meta_ext"] = gzip.open(tmp_metaext, "wb")
+        gzips["meta_ext"] = gzip.GzipFile(
+            filename=META_EXT, mode="wb", fileobj=open(tmp_metaext, "wb")
+        )
         # Append list opening to the meta_ext file.
         gzips.get("meta_ext").write(b"[\n")
         f = EXTENDED_FIELD_HANDLERS.get(sys.argv[1])
@@ -261,9 +265,10 @@ def _main():
     results = query.all()
     n = len(results) - 1
 
+    with io.TextIOWrapper(gzips.get("packages")) as p:
         for i, result in enumerate(results):
             # Append to packages.gz.
-            gzips.get("packages").write(f"{result.Name}\n")
+            p.write(f"{result.Name}\n")
 
             # Construct our result JSON dictionary.
             item = as_dict(result)
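The packages list, by contrast, is written as text, so the binary GzipFile is wrapped in io.TextIOWrapper: the wrapper handles str-to-bytes encoding and, when closed, also closes the GzipFile underneath. A sketch of the same pattern in isolation, again with illustrative paths:

    import gzip
    import io

    gz = gzip.GzipFile(
        filename="packages.gz", mode="wb", fileobj=open("packages.gz.tmp", "wb")
    )
    with io.TextIOWrapper(gz) as p:
        p.write("example-package\n")  # str in, compressed bytes out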
@@ -295,13 +300,17 @@ def _main():
     # Produce pkgbase.gz
     query = db.query(PackageBase.Name).filter(PackageBase.PackagerUID.isnot(None)).all()
     tmp_pkgbase = f"{PKGBASE}.tmp"
-    with gzip.open(tmp_pkgbase, "wt") as f:
+    pkgbase_gzip = gzip.GzipFile(
+        filename=PKGBASE, mode="wb", fileobj=open(tmp_pkgbase, "wb")
+    )
+    with io.TextIOWrapper(pkgbase_gzip) as f:
         f.writelines([f"{base.Name}\n" for i, base in enumerate(query)])
 
     # Produce users.gz
     query = db.query(User.Username).all()
     tmp_users = f"{USERS}.tmp"
-    with gzip.open(tmp_users, "wt") as f:
+    users_gzip = gzip.GzipFile(filename=USERS, mode="wb", fileobj=open(tmp_users, "wb"))
+    with io.TextIOWrapper(users_gzip) as f:
         f.writelines([f"{user.Username}\n" for i, user in enumerate(query)])
 
     files = [
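To check the result of the fix, the stored original filename can be read straight from the gzip header (RFC 1952): after the fixed magic/flag/mtime bytes and an optional FEXTRA block, the FNAME field is a NUL-terminated Latin-1 string. A small verification sketch, not part of the commit; the helper name and command-line usage are made up for illustration:

    import struct
    import sys

    def stored_gzip_name(path):
        """Return the FNAME recorded in a .gz header, or None if absent."""
        with open(path, "rb") as f:
            header = f.read(10)
            if len(header) < 10 or header[:2] != b"\x1f\x8b":
                raise ValueError(f"{path} is not a gzip file")
            flags = header[3]
            if flags & 0x04:                      # FEXTRA: skip the extra field
                (xlen,) = struct.unpack("<H", f.read(2))
                f.read(xlen)
            if not flags & 0x08:                  # FNAME flag not set
                return None
            name = bytearray()
            while (b := f.read(1)) not in (b"", b"\x00"):
                name += b
            return name.decode("latin-1")

    if __name__ == "__main__":
        # e.g. `python check_gzip_name.py packages.gz` should not print a .tmp name
        print(stored_gzip_name(sys.argv[1]))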