| |
@@ -211,6 +211,10 @@
|
| |
raw = gunzip(gz)
|
| |
accesses = parse_access_file(raw)
|
| |
|
| |
+ # Clean temporary files, we don't need them for the rest of the cycle
|
| |
+ for path in [gz, raw]:
|
| |
+ os.remove(path)
|
| |
+
|
| |
different_cdn = check_different_cdn_hostname(accesses, cdn_hostname)
|
| |
if different_cdn:
|
| |
log.debug("Skipping: %s (different hostname: %s)",
|
| |
@@ -228,10 +232,6 @@
|
| |
try_indefinitely=args.try_indefinitely)
|
| |
s3.delete_file(s3file)
|
| |
|
| |
- # Clean all temporary files
|
| |
- for path in [gz, raw]:
|
| |
- os.remove(path)
|
| |
-
|
| |
os.removedirs(tmp)
|
| |
|
| |
|
| |
Otherwise, temporary files for accesses that are skipped are never deleted; they
accumulate in our /tmp directory, which eventually runs out of space.