initial draft for follows/following pagination

parent fbe7af3d56
commit 8ce513ed09

7 changed files with 34 additions and 13 deletions
@@ -247,15 +247,21 @@ const fetchUser = ({id, credentials}) => {
     .then((data) => parseUser(data))
 }
 
-const fetchFriends = ({id, credentials}) => {
+const fetchFriends = ({id, page, credentials}) => {
   let url = `${FRIENDS_URL}?user_id=${id}`
+  if (page) {
+    url = url + `&page=${page}`
+  }
   return fetch(url, { headers: authHeaders(credentials) })
     .then((data) => data.json())
     .then((data) => data.map(parseUser))
 }
 
-const fetchFollowers = ({id, credentials}) => {
+const fetchFollowers = ({id, page, credentials}) => {
   let url = `${FOLLOWERS_URL}?user_id=${id}`
+  if (page) {
+    url = url + `&page=${page}`
+  }
   return fetch(url, { headers: authHeaders(credentials) })
     .then((data) => data.json())
     .then((data) => data.map(parseUser))
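The api service change threads an optional page number into the friends/followers query string. Below is a minimal usage sketch, not part of this commit, showing how a caller could walk every page; it assumes pages are 1-indexed and that the backend answers with an empty list once the pages run out.

// Hypothetical helper: collect all followers by requesting successive
// pages until an empty page comes back (the stop condition is an assumption).
const fetchAllFollowers = ({id, credentials}) => {
  const walk = (page, acc) =>
    fetchFollowers({id, page, credentials})
      .then((users) => users.length === 0
        ? acc
        : walk(page + 1, acc.concat(users)))
  return walk(1, [])
}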
@@ -10,12 +10,12 @@ const backendInteractorService = (credentials) => {
     return apiService.fetchConversation({id, credentials})
   }
 
-  const fetchFriends = ({id}) => {
-    return apiService.fetchFriends({id, credentials})
+  const fetchFriends = ({id, page}) => {
+    return apiService.fetchFriends({id, page, credentials})
   }
 
-  const fetchFollowers = ({id}) => {
-    return apiService.fetchFollowers({id, credentials})
+  const fetchFollowers = ({id, page}) => {
+    return apiService.fetchFollowers({id, page, credentials})
   }
 
   const fetchAllFollowing = ({username}) => {
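The backend interactor simply forwards the page argument along with the stored credentials. A rough wiring sketch follows; the credentials shape and the ids are placeholders, not taken from this commit.

// Hypothetical usage: build an interactor for the logged-in user and ask
// for the second page of another user's followers.
const credentials = { username: 'alice', password: 'secret' }  // placeholder
const interactor = backendInteractorService(credentials)

interactor.fetchFollowers({ id: 42, page: 2 })
  .then((followers) => console.log(`page 2: ${followers.length} users`))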
@@ -113,6 +113,8 @@ export const parseUser = (data) => {
   output.locked = data.locked
   output.followers_count = data.followers_count
   output.statuses_count = data.statuses_count
+  output.friends = []
+  output.followers = []
 
   return output
 }
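parseUser now seeds every user with empty friends and followers arrays, so paginated results have a place to accumulate. A hypothetical merge helper is sketched below, assuming parsed users carry an id field; neither the helper nor that assumption is part of this commit.

// Append one fetched page of followers to the array parseUser initializes,
// skipping users that an earlier page already delivered.
const appendFollowerPage = (user, followersPage) => {
  const seen = new Set(user.followers.map((u) => u.id))
  user.followers = user.followers.concat(
    followersPage.filter((u) => !seen.has(u.id))
  )
  return user
}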