crawl.ts

import { ACTIVITY_PUB, JOB_REQUEST_TIMEOUT, WEBSERVER } from '../../initializers/constants'
import { doRequest } from '../../helpers/requests'
import { logger } from '../../helpers/logger'
import * as Bluebird from 'bluebird'
import { ActivityPubOrderedCollection } from '../../../shared/models/activitypub'
import { parse } from 'url'
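
// A handler processes one page of collection items; a cleaner runs once after
// the whole crawl, receiving the date at which the crawl started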
type HandlerFunction<T> = (items: T[]) => (Promise<any> | Bluebird<any>)
type CleanerFunction = (startedDate: Date) => (Promise<any> | Bluebird<any>)

async function crawlCollectionPage <T> (uri: string, handler: HandlerFunction<T>, cleaner?: CleanerFunction) {
  logger.info('Crawling ActivityPub data on %s.', uri)

  const options = {
    method: 'GET',
    uri,
    json: true,
    activityPub: true,
    timeout: JOB_REQUEST_TIMEOUT
  }
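
  // Remember when the crawl started; the optional cleaner receives this date
  // once every page has been processed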
  const startDate = new Date()

  const response = await doRequest<ActivityPubOrderedCollection<T>>(options)
  const firstBody = response.body

  const limit = ACTIVITY_PUB.FETCH_PAGE_LIMIT
  let i = 0
  let nextLink = firstBody.first
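
  // Follow the collection's first/next links, stopping after FETCH_PAGE_LIMIT pages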
  while (nextLink && i < limit) {
    let body: any

    if (typeof nextLink === 'string') {
      // Don't crawl ourselves: a `continue` here would spin forever because
      // nextLink never changes, so stop the crawl instead
      const remoteHost = parse(nextLink).host
      if (remoteHost === WEBSERVER.HOST) break

      options.uri = nextLink

      const res = await doRequest<ActivityPubOrderedCollection<T>>(options)
      body = res.body
    } else {
      // nextLink is already the object we want
      body = nextLink
    }

    nextLink = body.next
    i++

    if (Array.isArray(body.orderedItems)) {
      const items = body.orderedItems
      logger.info('Processing %i ActivityPub items for %s.', items.length, options.uri)

      await handler(items)
    }
  }

  if (cleaner) await cleaner(startDate)
}

export {
  crawlCollectionPage
}
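
// Example usage: a minimal sketch, assuming the collection pages list follower
// URIs as plain strings (the URL and helper names below are hypothetical):
//
//   await crawlCollectionPage<string>(
//     'https://peertube.example/accounts/alice/followers',
//     items => addFollowers(items),                    // hypothetical page handler
//     startedDate => removeStaleFollowers(startedDate) // hypothetical cleaner
//   )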