# git.0d.be — django-panik-emissions.git
# emissions/management/commands/load-from-drupal-json.py
# (commit: "add command to import from (custom) drupal export")
1 import datetime
2 import isodate
3 import json
4 import requests
5
6 from django.conf import settings
7 from django.core.files import File
8 from django.core.files.base import ContentFile
9 from django.core.files.storage import default_storage
10 from django.core.management.base import BaseCommand, CommandError
11 from django.utils.text import slugify
12 from django.utils.timezone import make_naive
13
14 from ...models import Emission, Episode, Diffusion
15
16
class Command(BaseCommand):
    """Import emissions, episodes and diffusions from a (custom) Drupal
    JSON export file.

    The export is expected to be a JSON object with an ``episodes``
    mapping; each record carries at least ``title``, ``emission``,
    ``path``, ``pub_date``, ``start_date``/``start_time``/``end_time``,
    and optionally ``text``, ``sound`` and ``image``.
    """

    help = 'import emissions/episodes from a (custom) drupal JSON export'

    def add_arguments(self, parser):
        parser.add_argument('filename', metavar='FILENAME', type=str,
                help='name of file to import')

    def handle(self, filename, verbosity, **options):
        """Load the export file and import every usable episode record."""
        self.verbose = (verbosity > 1)
        # read the whole export up front so the file is closed promptly
        # (the original relied on GC to close the handle)
        with open(filename) as fd:
            episodes = json.load(fd).get('episodes', {})
        for episode_data in episodes.values():
            # skip records too incomplete to be worth importing
            if not episode_data.get('title'):
                continue
            if not episode_data.get('emission'):
                continue
            if not episode_data.get('text') and not episode_data.get('sound'):
                continue
            self._import_episode(episode_data)

    def _import_episode(self, episode_data):
        """Create or update the Emission/Episode pair for one record, and
        create its Diffusion when none exists yet."""
        # get_or_create already persists a newly created row; the extra
        # save() the original did on creation was redundant.
        emission, created = Emission.objects.get_or_create(
                slug=slugify(episode_data['emission']),
                defaults={'title': episode_data['emission']})
        episode, created = Episode.objects.get_or_create(
                slug=episode_data['path'].split('/')[-1],
                emission=emission)
        episode.title = episode_data['title']
        episode.text = episode_data['text']
        # pub_date looks like "YYYY-MM-DD HH:MM:SS[ offset]": turn the first
        # space into a 'T' and strip any remaining spaces to get ISO 8601
        episode.creation_timestamp = isodate.parse_datetime(
                episode_data['pub_date'].replace(' ', 'T', 1).replace(' ', ''))
        if not settings.USE_TZ:
            episode.creation_timestamp = make_naive(episode.creation_timestamp)
        start_datetime, end_datetime = self._parse_schedule(episode_data)
        episode.duration = (end_datetime - start_datetime).seconds // 60
        episode.save()
        if not episode.image and episode_data.get('image'):
            self._attach_image(episode, emission, episode_data.get('image'))
        if episode.diffusion_set.count() == 0:
            diffusion = Diffusion(episode=episode)
            diffusion.datetime = start_datetime
            diffusion.save()

    def _parse_schedule(self, episode_data):
        """Return (start, end) naive datetimes built from the record's
        start_date (DD/MM/YYYY), start_time and end_time (HH:MM).

        A missing end_time yields end == start (zero duration); an end
        earlier than start is taken to cross midnight and rolls over to
        the next day.
        """
        start_datetime = datetime.datetime.combine(
                datetime.datetime.strptime(episode_data['start_date'], '%d/%m/%Y').date(),
                datetime.datetime.strptime(episode_data['start_time'], '%H:%M').time())
        if episode_data['end_time']:
            end_datetime = datetime.datetime.combine(
                    start_datetime.date(),
                    datetime.datetime.strptime(episode_data['end_time'], '%H:%M').time())
        else:
            end_datetime = start_datetime  # fake
        if end_datetime < start_datetime:
            end_datetime = end_datetime + datetime.timedelta(days=1)
        return start_datetime, end_datetime

    def _attach_image(self, episode, emission, image_url):
        """Attach the episode image, downloading it unless a file with the
        same name is already in storage; files the image backend rejects
        are discarded rather than aborting the import."""
        # the original built this exact path string twice; build it once
        path = 'images/%s/%s' % (emission.slug, image_url.split('/')[-1])
        if not default_storage.exists(path):
            # a timeout keeps one dead image server from hanging the
            # whole import run
            path = default_storage.save(
                    path,
                    ContentFile(requests.get(image_url, timeout=30).content))
        episode.image = default_storage.open(path)
        try:
            episode.save()
        except OSError:  # OSError: cannot identify image file
            episode.image = None
            episode.save()