1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
|
using System.Threading.Tasks;
using System.Collections.Generic;
using Classes;
using Database;
using Aggregator;
namespace Main.Jobs {
/// <summary>
/// Background job that downloads the configured RSS feeds, parses them into
/// articles via <c>ParserService</c>, and persists the results to the database.
/// </summary>
public static class AggregatorJob {
// Per-source parse configuration. The integer properties are presumably
// field/column indices consumed by ParserService.ParseMultiple — TODO confirm
// against ParserService. Order must match `sites` below (BBC, Sky, ABC).
private static readonly List<Config> configs = new List<Config> {
new Config {
Source = "BBC",
Title = 0,
Description = 1,
ArticleLink = 2,
ImageLink = 5,
PublishDate = 4
},
new Config {
Source = "Sky",
Title = 0,
Description = 2,
ArticleLink = 1,
ImageLink = 7,
PublishDate = 3
},
new Config {
Source = "ABC",
Title = 7,
Description = 11,
ArticleLink = 8,
ImageLink = 0,
PublishDate = 10
}
};
// Feed URLs, one per entry in `configs` (same order).
private static readonly List<string> sites = new List<string> {
"http://feeds.bbci.co.uk/news/rss.xml",
"http://feeds.skynews.com/feeds/rss/world.xml",
"https://abcnews.go.com/abcnews/topstories"
};
/// <summary>
/// Downloads and parses all configured feeds, then inserts the resulting
/// articles into the database.
/// </summary>
/// <param name="context">Database context used to persist the parsed articles.</param>
/// <returns>A task that completes when the articles have been inserted.</returns>
// Fixed: was `async void`, which made exceptions unobservable and the job
// impossible to await; `async Task` is backward-compatible for existing callers.
public static async Task Process(DatabaseContext context) {
var articles = await ParseMultipleAsync();
await context.InsertArticles(articles);
}
/// <summary>
/// Downloads every configured feed concurrently and parses the raw XML
/// payloads into a single list of articles.
/// </summary>
/// <returns>The articles parsed from all feeds.</returns>
private static async Task<List<Article>> ParseMultipleAsync() {
// Start all downloads at once instead of awaiting them one by one;
// the feeds are independent, so this overlaps the network latency.
var downloads = new List<Task<string>>();
foreach (string site in sites) {
downloads.Add(Download.DownloadXML(site));
}
// WhenAll preserves input order, so results still line up with `configs`.
string[] xmls = await Task.WhenAll(downloads);
return ParserService.ParseMultiple(xmls, configs).ToList();
}
}
}
|