@@ -35,7 +35,6 @@
 '''
 
 import argparse
-import itertools
 import os
 import shutil
 import tempfile
@@ -52,6 +51,7 @@
 from fedora_messaging.exceptions import PublishReturned, ConnectionException
 
 
+DB_FOLDER = '/var/tmp'
 KOJI_REPO = 'https://kojipkgs.fedoraproject.org/repos/'
 PKGDB2_URL = 'https://admin.fedoraproject.org/pkgdb/'
 DL_SERVER = 'https://dl.fedoraproject.org'
@@ -61,6 +61,8 @@
 DL_VERIFY = True
 # Whether to publish to Fedora Messaging.
 PUBLISH_CHANGES = True
+# How long to wait between retries if processing failed.
+CRON_SLEEP = 30
 
 
 repomd_xml_namespace = {
@@ -348,11 +350,10 @@
     return False
 
 
-def process_repo(tupl):
+def process_repo(repo):
     ''' Retrieve the repo metadata at the given url and store them using
     the provided name.
     '''
-    destfolder, repo = tupl
     url, name = repo
     repomd_url = url + '/repomd.xml'
     response = requests.get(repomd_url, verify=DL_VERIFY)
@@ -399,7 +400,7 @@
             db = f'mdapi-{name}-other.sqlite'
 
         # Have we downloaded this before? Did it change?
-        destfile = os.path.join(destfolder, db)
+        destfile = os.path.join(DB_FOLDER, db)
         if not needs_update(destfile, shasum, shatype):
             print(f'{name.ljust(padding)} No change of {repomd_url}')
             continue
@@ -435,18 +436,21 @@
         exec(compile(
             config_file.read(), configfile, 'exec'), CONFIG)
 
-    if not os.path.exists(CONFIG.get('DB_FOLDER', '/var/tmp')):
+    global DB_FOLDER
+    DB_FOLDER = CONFIG.get('DB_FOLDER', DB_FOLDER)
+    if not os.path.exists(DB_FOLDER):
         print('Could not find the configuration file')
         return 1
 
     global PKGDB2_URL, KOJI_REPO, DL_SERVER, PKGDB2_VERIFY, DL_VERIFY
-    global PUBLISH_CHANGES
+    global PUBLISH_CHANGES, CRON_SLEEP
     PKGDB2_URL = CONFIG.get('PKGDB2_URL', PKGDB2_URL)
     KOJI_REPO = CONFIG.get('KOJI_REPO', KOJI_REPO)
     DL_SERVER = CONFIG.get('DL_SERVER', DL_SERVER)
     PKGDB2_VERIFY = CONFIG.get('PKGDB2_VERIFY', PKGDB2_VERIFY)
     DL_VERIFY = CONFIG.get('DL_VERIFY', DL_VERIFY)
     PUBLISH_CHANGES = CONFIG.get('PUBLISH_CHANGES', PUBLISH_CHANGES)
+    CRON_SLEEP = CONFIG.get('CRON_SLEEP', CRON_SLEEP)
 
     if not DL_VERIFY or not PKGDB2_VERIFY:
         # Suppress urllib3's warning about insecure requests
@@ -513,30 +517,24 @@
 
     # In parallel
     # p = multiprocessing.Pool(10)
-    # p.map(process_repo, itertools.product(
-    #     [CONFIG.get('DB_FOLDER', '/var/tmp')],
-    #     repositories)
-    # )
+    # p.map(process_repo, repositories)
 
     # In serial
-    sleep_for = CONFIG.get('CRON_SLEEP', 30)
-    for t in itertools.product(
-            [CONFIG.get('DB_FOLDER', '/var/tmp')],
-            repositories):
+    for repo in repositories:
 
         loop = True
         cnt = 0
         while loop:
             cnt += 1
             try:
-                process_repo(t)
+                process_repo(repo)
                 loop = False
             except OSError:
                 if cnt == 4:
                     raise
                 # Most often due to an invalid stream, so let's try a second time
-                time.sleep(sleep_for)
-                process_repo(t)
+                time.sleep(CRON_SLEEP)
+                process_repo(repo)
 
     return 0
 
That is, make DB_FOLDER and CRON_SLEEP module-level globals, and look them up with CONFIG.get() the same way as the other settings.
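
For reference, a minimal standalone sketch of that pattern. The apply_config() helper name is hypothetical and used here purely for illustration; in the script itself the equivalent assignments run inline after the configuration file has been exec()'d into CONFIG.

# Module-level defaults that a loaded configuration may override via dict.get().
DB_FOLDER = '/var/tmp'   # default location for the downloaded sqlite databases
CRON_SLEEP = 30          # default seconds to wait between retries

CONFIG = {}              # populated from the configuration file at runtime


def apply_config():
    ''' Override the module-level defaults with values found in CONFIG,
    keeping the current default for any key that is absent.
    '''
    global DB_FOLDER, CRON_SLEEP
    DB_FOLDER = CONFIG.get('DB_FOLDER', DB_FOLDER)
    CRON_SLEEP = CONFIG.get('CRON_SLEEP', CRON_SLEEP)


if __name__ == '__main__':
    CONFIG['CRON_SLEEP'] = 60     # as if read from the configuration file
    apply_config()
    print(DB_FOLDER, CRON_SLEEP)  # -> /var/tmp 60

With the globals in place, the retry loop can simply call time.sleep(CRON_SLEEP), and process_repo() can join paths against DB_FOLDER without a destination folder being threaded through every call.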