Commit 4adcced7 authored by okhin's avatar okhin 🚴

Merge branch 'master' of git.laquadrature.net:memopol/memopol

parents 5a0551d3 e293e3b9
......@@ -380,7 +380,10 @@ def main(stream=None):
for data in ijson.items(stream or sys.stdin, ''):
for rep in data:
if rep['chambre'] == 'AN':
an_importer.manage_rep(rep)
elif rep['chambre'] == 'SEN':
sen_importer.manage_rep(rep)
try:
if rep['chambre'] == 'AN':
an_importer.manage_rep(rep)
elif rep['chambre'] == 'SEN':
sen_importer.manage_rep(rep)
except Exception:
logger.exception('error trying to import rep %s', str(rep))
......@@ -420,7 +420,11 @@ def main(stream=None):
GenericImporter.pre_import(importer)
for data in ijson.items(stream or sys.stdin, 'item'):
importer.manage_mep(data)
try:
importer.manage_mep(data)
except Exception:
logger.exception('error trying to import rep %s', str(data))
# Commenting for now, it's a bit dangerous, if a json file was corrupt it
# would drop valid data !
# importer.post_import()
......@@ -45,44 +45,48 @@ class PositionImporter:
return rep
def import_row(self, row):
    """
    Import a single position row for a representative.

    Fills in a missing date (looked up from ``position_dates`` by URL, or
    placeholder values when the row also lacks a URL), resolves the
    representative by first/last name, strips the wrapping ``<p>`` tags
    from the content and creates a ``Position`` if none exists yet for
    that representative/URL pair.

    Returns True on success, False when the row was skipped or an
    unexpected error occurred.  Errors are logged, never raised, so one
    corrupt row cannot abort the whole import run.
    """
    try:
        if len(row['date']) == 0:
            if len(row['url']) == 0:
                # Neither a date nor a URL: fall back to placeholders.
                row['date'] = '2010-01-01'
                row['url'] = '/'
            else:
                # Recover the date from the known position URLs.
                row['date'] = position_dates.get(row['url'], None)
                if row['date'] is None:
                    # logger.warn is a deprecated alias; use warning().
                    logger.warning('Dateless position for %s %s on URL %s' %
                                   (row['first_name'], row['last_name'],
                                    row['url']))
                    return False

        rep = self.get_rep(row['first_name'], row['last_name'])
        if rep is None:
            logger.warning('Could not find rep %s %s' % (row['first_name'],
                                                         row['last_name']))
            return False

        # Strip one wrapping paragraph tag; prepend the title when present.
        text = re.sub('(^<p>|</p>$)', '', row['content'])
        if row['title'] is not None and len(row['title']) > 0:
            text = '%s\n%s' % (row['title'], text)

        try:
            position = Position.objects.get(representative=rep,
                                            link=row['url'])
        except Position.DoesNotExist:
            position = Position(
                representative=rep,
                link=row['url'],
                datetime=row['date'],
                text=text,
                published=True
            )
            position.save()
            logger.info('Created position for %s %s on URL %s' % (
                row['first_name'], row['last_name'], row['url']))
        # An already-existing position is left untouched and still counts
        # as a successful import.
        return True
    except Exception:
        # Catch-all added post-merge: log and skip the corrupt row instead
        # of aborting the whole stream.
        logger.exception('error trying to import position %s', str(row))
        return False
def main(stream=None):
......
......@@ -54,41 +54,47 @@ class RecommendationImporter:
return None
def import_row(self, row):
    """
    Import a single recommendation row.

    Resolves the dossier by title and the proposal by dossier/part,
    computes the weight (``weight * ponderation``) and creates a
    ``Recommendation`` for the proposal if none exists yet.

    Returns True on success, False when the row was skipped or an
    unexpected error occurred.  Errors are logged, never raised, so one
    corrupt row cannot abort the whole import run.
    """
    try:
        dossier = self.get_dossier(row['title'])
        if dossier is None:
            # logger.warn is a deprecated alias; use warning().
            logger.warning('No dossier "%s"' % row['title'])
            return False

        proposal = self.get_proposal(dossier, row['part'])
        if proposal is None:
            logger.warning('No proposal "%s" for dossier %s (%d): "%s"' % (
                row['part'].decode('utf-8'), dossier.reference, dossier.pk,
                row['title']))
            return False

        weight = int(row['weight']) * int(row['ponderation'])
        descr = row['description'].strip()
        if len(descr) == 0:
            # Fall back to a synthetic description built from the row.
            descr = '%s on %s' % (row['part'], dossier.reference)

        try:
            recom = Recommendation.objects.get(proposal=proposal)
        except Recommendation.DoesNotExist:
            recom = Recommendation(
                proposal=proposal,
                recommendation=row['recommendation'],
                title=descr,
                weight=weight
            )
            recom.save()
            logger.info('Created recommendation with weight %s for %s: %s'
                        % (
                            weight,
                            row['title'],
                            row['part']
                        ))
        # An already-existing recommendation is left untouched and still
        # counts as a successful import.
        return True
    except Exception:
        # Catch-all added post-merge: log and skip the corrupt row instead
        # of aborting the whole stream.
        logger.exception('error trying to import recommendation %s',
                         str(row))
        return False
def main(stream=None):
"""
......
......@@ -134,4 +134,7 @@ def main(stream=None):
an = Chamber.objects.get(abbreviation='AN')
sen = Chamber.objects.get(abbreviation='SEN')
for data in ijson.items(stream or sys.stdin, 'item'):
parse_dossier_data(data, an, sen)
try:
parse_dossier_data(data, an, sen)
except Exception:
logger.exception('error trying to import dossier %s', str(data))
......@@ -106,4 +106,7 @@ def main(stream=None):
importer = ScrutinImporter()
for data in ijson.items(stream or sys.stdin, 'item'):
importer.parse_scrutin_data(data)
try:
importer.parse_scrutin_data(data)
except Exception:
logger.exception('error trying to import scrutin %s', str(data))
......@@ -126,6 +126,9 @@ def main(stream=None):
importer = VotesImporter()
for data in ijson.items(stream or sys.stdin, 'item'):
importer.parse_vote_data(data)
try:
importer.parse_vote_data(data)
except Exception:
logger.exception('error trying to import vote %s', str(data))
importer.update_totals()
......@@ -94,4 +94,7 @@ def main(stream=None):
ep = Chamber.objects.get(abbreviation='EP')
for data in ijson.items(stream or sys.stdin, 'item'):
parse_dossier_data(data, ep)
try:
parse_dossier_data(data, ep)
except Exception:
logger.exception('error trying to import dossier %s', str(data))
......@@ -255,4 +255,7 @@ def main(stream=None):
command.init_cache()
for vote_data in ijson.items(stream or sys.stdin, 'item'):
command.parse_vote_data(vote_data)
try:
command.parse_vote_data(vote_data)
except Exception:
logger.exception('error trying to import vote %s', str(vote_data))
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment