Top 10 Examples of "feedparser in functional component" in Python

Dive into secure and efficient coding practices with our curated list of the top 10 examples showcasing 'feedparser' in functional components in Python. Our advanced machine learning engine meticulously scans each line of code, cross-referencing millions of open source libraries to ensure your implementation is not just functional, but also robust and secure. Master feed fetching, caching, URL sanitization, and error handling in plain Python functions with confidence and precision.
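
Before the longer examples, here is a minimal sketch of feedparser used inside a plain function; the feed URL and the printed fields are illustrative assumptions rather than part of any project quoted on this page.

import feedparser

def fetch_titles(url):
    """Parse a feed and return its entry titles (empty list on hard failure)."""
    parsed = feedparser.parse(url)
    # 'bozo' is set when feedparser hit a problem; the partial result may still be usable.
    if parsed.bozo and not parsed.entries:
        return []
    return [entry.get('title', '') for entry in parsed.entries]

if __name__ == '__main__':
    # Hypothetical feed URL for illustration only.
    for title in fetch_titles('https://example.com/feed.xml'):
        print(title)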

import feedparser, feedjack
from feedjack.models import (
	transaction_wrapper, transaction, IntegrityError,
	transaction_signaled_commit, transaction_signaled_rollback )
from feedjack.utils import command_logger_setup

import itertools as it, operator as op, functools as ft
from datetime import datetime, timedelta
from time import struct_time, sleep
from collections import defaultdict
from hashlib import sha256
import os, sys, types, hmac, argparse

USER_AGENT = 'Feedjack/{} ({}) feedparser/{}'.format(
	feedjack.__version__, feedjack.__url__, feedparser.__version__ )
SLOWFEED_WARNING = 10

import logging
logging.EXTRA = (logging.DEBUG + logging.INFO) // 2
logging.addLevelName(logging.EXTRA, 'EXTRA')
log = logging.getLogger('feedjack.update')
log.extra = ft.partial(log.log, logging.EXTRA) # should only be used here


ENTRY_NEW, ENTRY_UPDATED,\
	ENTRY_SAME, ENTRY_ERR = range(4)

entry_keys = (
	(ENTRY_NEW, 'new'),
	(ENTRY_UPDATED, 'updated'),
	(ENTRY_SAME, 'same'),
	(ENTRY_ERR, 'err') )
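
The Feedjack module above builds a USER_AGENT string from package metadata. As a brief sketch (the URL and agent string here are illustrative assumptions), this is how such an agent string is typically handed to feedparser.parse:

import feedparser

# Illustrative agent string; Feedjack assembles its own from feedjack and feedparser versions.
USER_AGENT = 'Feedjack/example (https://example.org) feedparser/{}'.format(feedparser.__version__)

# feedparser.parse accepts an 'agent' keyword that sets the User-Agent header of the request.
result = feedparser.parse('https://example.com/feed.xml', agent=USER_AGENT)
print(result.feed.get('title'), len(result.entries))
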
        SessionManager.refresh_materialized_views(self._db)

        source = DataSource.lookup(self._db, self.datasource)
        metadata = Metadata(source)
        mock_api = MockNoveListAPI(self._db)
        metadata.recommendations = [same_author.license_pools[0].identifier]
        mock_api.setup(metadata)

        # A grouped feed is returned with all of the related books
        with self.request_context_with_library('/'):
            response = self.manager.work_controller.related(
                self.identifier.type, self.identifier.identifier,
                novelist_api=mock_api
            )
        eq_(200, response.status_code)
        feed = feedparser.parse(response.data)
        eq_(5, len(feed['entries']))

        def collection_link(entry):
            [link] = [l for l in entry['links'] if l['rel']=='collection']
            return link['title'], link['href']

        # This feed contains five books: one recommended, one in the
        # same series, two by the same author, and the original book itself.
        recommendations = []
        same_series = []
        same_contributor = []
        feeds_with_original_book = []
        for e in feed['entries']:
            for link in e['links']:
                if link['rel'] != 'collection':
                    continue

import json, os, time
import feedparser

def load_cache(path):
    """Load a cached feedparser result from the JSON file next to the .dat file."""
    jsonpath = path.replace('dat', 'json')
    if not os.path.exists(jsonpath):
        return None
    with open(jsonpath) as f:
        data = json.loads(f.read())
    ret = feedparser.FeedParserDict()
    ret.update(data)
    # Timestamps are cached as epoch seconds; convert them back to struct_time.
    if 'updated_parsed' in data['feed'] and data['feed']['updated_parsed']:
        try:
            data['feed']['updated_parsed'] = time.gmtime(data['feed']['updated_parsed'])
        except (TypeError, ValueError):
            pass

    ret.feed = feedparser.FeedParserDict(data.get('feed', {}))
    entries = []
    for e in data.get('entries', []):
        if 'updated_parsed' in e and e['updated_parsed']:
            try:
                e['updated_parsed'] = time.gmtime(e['updated_parsed'])
            except (TypeError, ValueError):
                pass
        entries.append(feedparser.FeedParserDict(e))
    ret.entries = entries
    return ret
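
A hypothetical counterpart to load_cache, assuming the cache is written as a .json file alongside the .dat path with epoch timestamps (this helper is an assumption for illustration, not part of the original project):

import calendar, json

def save_cache(path, parsed):
    """Write a feedparser result to JSON, converting struct_time back to epoch seconds."""
    jsonpath = path.replace('dat', 'json')
    data = {'feed': dict(parsed.feed), 'entries': [dict(e) for e in parsed.entries]}
    for d in [data['feed']] + data['entries']:
        if d.get('updated_parsed'):
            # calendar.timegm is the inverse of the time.gmtime call in load_cache.
            d['updated_parsed'] = calendar.timegm(d['updated_parsed'])
    with open(jsonpath, 'w') as f:
        # default=str keeps any remaining non-JSON values (e.g. other struct_time fields) from failing.
        json.dump(data, f, default=str)
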
def test_None(self):
    self.assertTrue(feedparser.datetimes._parse_date(None) is None)

@pytest.mark.parametrize(
    'exc_cls', [feedparser.CharacterEncodingOverride, feedparser.NonXMLContentType]
)
def test_parse_survivable_feedparser_exceptions(
    monkeypatch, caplog, parse, data_dir, exc_cls
):
    """parse() should not reraise some acceptable feedparser exceptions."""

    old_feedparser_parse = feedparser.parse

    def feedparser_parse(*args, **kwargs):
        rv = old_feedparser_parse(*args, **kwargs)
        rv['bozo'] = 1
        rv['bozo_exception'] = exc_cls("whatever")
        return rv

    monkeypatch.setattr('feedparser.parse', feedparser_parse)
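
For context, a small sketch of the pattern this test exercises: treating certain bozo_exception types as survivable while re-raising everything else (the choice of survivable classes below is an assumption for illustration):

import feedparser

SURVIVABLE = (feedparser.CharacterEncodingOverride, feedparser.NonXMLContentType)

def parse_tolerant(url):
    """Parse a feed, tolerating encoding-override and content-type warnings."""
    result = feedparser.parse(url)
    if result.bozo:
        exc = result.get('bozo_exception')
        if exc is not None and not isinstance(exc, SURVIVABLE):
            raise exc
    return result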

def test_catch_ValueError(self):
    """catch ValueError in Python 2.7 and up"""
    uri = 'http://bad]test/'
    value1 = feedparser.urls.make_safe_absolute_uri(uri)
    value2 = feedparser.urls.make_safe_absolute_uri(self.base, uri)
    swap = feedparser.urls.ACCEPTABLE_URI_SCHEMES
    feedparser.urls.ACCEPTABLE_URI_SCHEMES = ()
    value3 = feedparser.urls.make_safe_absolute_uri(self.base, uri)
    feedparser.urls.ACCEPTABLE_URI_SCHEMES = swap
    # Only Python 2.7 and up throw a ValueError, otherwise uri is returned
    self.assertTrue(value1 in (uri, ''))
    self.assertTrue(value2 in (uri, ''))
    self.assertTrue(value3 in (uri, ''))
def fn(self):
    # 'rel' and 'expect' are closed over from the enclosing scope that generates these test cases.
    value = feedparser.urls.make_safe_absolute_uri(self.base, rel)
    self.assertEqual(value, expect)
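
The tests above exercise feedparser.urls.make_safe_absolute_uri. As a short usage sketch (the URLs are illustrative assumptions), it resolves a relative link against a base URL and returns an empty string for schemes it does not accept:

import feedparser.urls

base = 'https://example.com/blog/feed.xml'
print(feedparser.urls.make_safe_absolute_uri(base, '/posts/1'))              # https://example.com/posts/1
print(feedparser.urls.make_safe_absolute_uri(base, 'javascript:alert(1)'))   # '' (scheme not acceptable)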

import sqlite3
import feedparser
import console, ui  # Pythonista UI modules
import urllib.parse as urlparse

# navigation_view, table_view and feed_list_controller are UI objects
# defined elsewhere in the surrounding Pythonista script.

def add_feed(sender):
	url = console.input_alert('', "Enter RSS feed URL:", 'http://www.macstories.net/feed/')
	result = urlparse.urlparse(url)
	# Fall back to the default feed if the entered URL has no host.
	if result.netloc == '':
		url = 'http://www.macstories.net/feed/'

	indicator = ui.ActivityIndicator()
	indicator.center = navigation_view.center
	navigation_view.add_subview(indicator)
	indicator.bring_to_front()
	indicator.start()

	feed = feedparser.parse(url)
	title = feed['feed']['title']

	conn = sqlite3.connect('feeds.db')
	conn.execute('INSERT INTO feeds VALUES (?, ?)', (title, url))
	conn.commit()

	feeds = []
	for title, url in conn.execute('SELECT * FROM feeds ORDER BY title'):
		feeds.append({'title': title, 'url': url })

	conn.close()

	feed_list_controller.feeds = feeds
	table_view.reload()
	indicator.stop()
	navigation_view.remove_subview(indicator)
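
add_feed assumes a feeds table with two columns already exists; a minimal setup sketch under that assumption (the table layout is inferred from the INSERT statement above):

import sqlite3

conn = sqlite3.connect('feeds.db')
# Two text columns matching the "INSERT INTO feeds VALUES (?, ?)" call in add_feed.
conn.execute('CREATE TABLE IF NOT EXISTS feeds (title TEXT, url TEXT)')
conn.commit()
conn.close()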
