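'''
RSS feeds for the Arch Linux website: recent package updates, news posts
and release ISOs.
'''
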
from datetime import datetime, time
from pytz import utc

from django.contrib.sites.models import Site
from django.contrib.syndication.views import Feed
from django.db import connection
from django.db.models import Q
from django.utils.feedgenerator import Rss201rev2Feed
from django.views.decorators.http import condition

from main.models import Arch, Repo, Package
from news.models import News
from releng.models import Release


class BatchWritesWrapper(object):
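    '''File-like wrapper that buffers write() calls and passes them on to the
    real file in batches of roughly 40, plus a final flush().'''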
    def __init__(self, outfile):
        self.outfile = outfile
        self.buf = []

    def write(self, s):
        buf = self.buf
        buf.append(s)
        if len(buf) >= 40:
            self.outfile.write(''.join(buf))
            self.buf = []

    def flush(self):
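        # Write out anything still buffered and flush the underlying file.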
        self.outfile.write(''.join(self.buf))
        self.outfile.flush()


class FasterRssFeed(Rss201rev2Feed):
    def write(self, outfile, encoding):
        '''
        Batch the underlying write() calls on the outfile, because Python's
        default saxutils XMLGenerator performs unbuffered write/flush calls.
        That gets expensive when it makes 1-byte calls to write '>' closing
        tags and issues over 1600 write calls for our package feed.
        '''
        wrapper = BatchWritesWrapper(outfile)
        super(FasterRssFeed, self).write(wrapper, encoding)
        wrapper.flush()


def package_last_modified(request, *args, **kwargs):
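    # Timestamp of the most recently updated package; fed to condition() below
    # so conditional GETs can be answered with 304 Not Modified.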
    cursor = connection.cursor()
    cursor.execute("SELECT MAX(last_update) FROM packages")
    return cursor.fetchone()[0]


class PackageFeed(Feed):
    feed_type = FasterRssFeed

    link = '/packages/'

    def __call__(self, request, *args, **kwargs):
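        # Wrap the standard Feed view with Django's condition() decorator so
        # that Last-Modified / If-Modified-Since handling happens per request.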
        wrapper = condition(last_modified_func=package_last_modified)
        return wrapper(super(PackageFeed, self).__call__)(request, *args, **kwargs)

    __name__ = 'package_feed'

    def get_object(self, request, arch='', repo=''):
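        # Build the queryset for this feed variant: optionally limited to a
        # single architecture (plus arch-agnostic 'any' packages) and/or a
        # single repository.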
        obj = dict()
        qs = Package.objects.normal().order_by('-last_update')

        if arch != '':
            # feed for a single arch, also include 'any' packages everywhere
            a = Arch.objects.get(name=arch)
            qs = qs.filter(Q(arch=a) | Q(arch__agnostic=True))
            obj['arch'] = a
        if repo != '':
            # feed for a single arch AND repo
            r = Repo.objects.get(name__iexact=repo)
            qs = qs.filter(repo=r)
            obj['repo'] = r
        else:
            # No repository was requested; exclude packages in staging repos.
            qs = qs.filter(repo__staging=False)
        obj['qs'] = qs[:50]
        return obj

    def title(self, obj):
        s = 'Arch Linux: Recent package updates'
        if 'repo' in obj and 'arch' in obj:
            s += ' (%s [%s])' % (obj['arch'].name, obj['repo'].name.lower())
        elif 'repo' in obj:
            s += ' [%s]' % (obj['repo'].name.lower())
        elif 'arch' in obj:
            s += ' (%s)' % (obj['arch'].name)
        return s

    def description(self, obj):
        s = 'Recently updated packages in the Arch Linux package repositories'
        if 'arch' in obj:
            s += ' for the \'%s\' architecture' % obj['arch'].name.lower()
            if not obj['arch'].agnostic:
                s += ' (including \'any\' packages)'
        if 'repo' in obj:
            s += ' in the [%s] repository' % obj['repo'].name.lower()
        s += '.'
        return s

    subtitle = description

    def items(self, obj):
        return obj['qs']

    # item_guid() below returns tag: URIs rather than resolvable links, so the
    # GUIDs are not permalinks.
    item_guid_is_permalink = False

    def item_guid(self, item):
        # http://diveintomark.org/archives/2004/05/28/howto-atom-id
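        # Illustrative example only (package URL format assumed): with a site
        # domain of 'archlinux.org' this yields something like
        # 'tag:archlinux.org,2012-02-05:/packages/core/x86_64/pacman/201202051430'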
        date = item.last_update
        return 'tag:%s,%s:%s%s' % (Site.objects.get_current().domain,
                date.strftime('%Y-%m-%d'), item.get_absolute_url(),
                date.strftime('%Y%m%d%H%M'))

    def item_pubdate(self, item):
        return item.last_update

    def item_title(self, item):
        return '%s %s %s' % (item.pkgname, item.full_version, item.arch.name)

    def item_description(self, item):
        return item.pkgdesc

    def item_categories(self, item):
        return (item.repo.name, item.arch.name)


def news_last_modified(request, *args, **kwargs):
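    # Timestamp of the most recently modified news item, used the same way as
    # package_last_modified() above.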
    cursor = connection.cursor()
    cursor.execute("SELECT MAX(last_modified) FROM news")
    return cursor.fetchone()[0]


class NewsFeed(Feed):
    feed_type = FasterRssFeed

    title = 'Arch Linux: Recent news updates'
    link = '/news/'
    description = 'The latest and greatest news from the Arch Linux distribution.'
    subtitle = description

    def __call__(self, request, *args, **kwargs):
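        # Same conditional-GET wrapping as PackageFeed, keyed on news.last_modified.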
        wrapper = condition(last_modified_func=news_last_modified)
        return wrapper(super(NewsFeed, self).__call__)(request, *args, **kwargs)

    __name__ = 'news_feed'

    def items(self):
        return News.objects.select_related('author').order_by(
                '-postdate', '-id')[:10]

    item_guid_is_permalink = False

    def item_guid(self, item):
        return item.guid

    def item_pubdate(self, item):
        return item.postdate

    def item_updateddate(self, item):
        return item.last_modified

    def item_author_name(self, item):
        return item.author.get_full_name()

    def item_title(self, item):
        return item.title

    def item_description(self, item):
        return item.html()


class ReleaseFeed(Feed):
    feed_type = FasterRssFeed

    title = 'Arch Linux: Releases'
    link = '/download/'
    description = 'Release ISOs'
    subtitle = description

    __name__ = 'release_feed'

    def items(self):
        return Release.objects.filter(available=True)[:10]

    def item_title(self, item):
        return item.version

    def item_description(self, item):
        return item.info_html()

    def item_pubdate(self, item):
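        # release_date is a plain date; combine it with midnight and mark it
        # as UTC so the feed generator receives an aware datetime.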
        return datetime.combine(item.release_date, time()).replace(tzinfo=utc)

    def item_updateddate(self, item):
        return item.last_modified

    item_guid_is_permalink = False

    def item_guid(self, item):
        # http://diveintomark.org/archives/2004/05/28/howto-atom-id
        date = item.release_date
        return 'tag:%s,%s:%s' % (Site.objects.get_current().domain,
                date.strftime('%Y-%m-%d'), item.get_absolute_url())

    def item_enclosure_url(self, item):
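        # Torrent download URL for the release ISO, built from the current
        # site's domain.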
        domain = Site.objects.get_current().domain
        proto = 'https'
        return "%s://%s/%s.torrent" % (proto, domain, item.iso_url())

    def item_enclosure_length(self, item):
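        # RSS enclosures require a length; use the size recorded in the torrent
        # metadata when available, otherwise fall back to an empty string.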
        if item.torrent_data:
            torrent = item.torrent()
            return torrent['file_length'] or ""
        return ""

    item_enclosure_mime_type = 'application/x-bittorrent'

# vim: set ts=4 sw=4 et: