Mirror of https://github.com/zkitter/groups.git, synced 2026-01-09 13:08:01 -05:00
fix: fetch spaces by chunk of 20 (#9)
* fix: fetch spaces by chunk of 20

  Need to page over results

* add console logs
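For context, the change stops sending every Snapshot space id in a single GraphQL request: the ids are split into chunks of 20, each chunk is fetched concurrently, and the results are merged. A minimal sketch of that paging pattern, assuming a generic `fetchByIds` callback in place of the repo's actual GraphQL helper (the names `chunk` and `fetchAll` are illustrative, not from the codebase):

// Sketch only: generic chunked fetching, not the repo's exact implementation.
const CHUNK_SIZE = 20

// Split an array into consecutive chunks of at most `size` items.
const chunk = <T>(items: T[], size: number): T[][] => {
  const out: T[][] = []
  for (let i = 0; i < items.length; i += size) {
    out.push(items.slice(i, i + size))
  }
  return out
}

// Issue one request per chunk in parallel and flatten the pages.
const fetchAll = async <T>(
  ids: string[],
  fetchByIds: (ids: string[]) => Promise<T[]>,
): Promise<T[]> => {
  const pages = await Promise.all(chunk(ids, CHUNK_SIZE).map((c) => fetchByIds(c)))
  return pages.flat()
}

The commit additionally de-duplicates the merged results through a Set before returning them, as the last hunk below shows.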
@@ -1,8 +1,9 @@
 import { getGhGroup } from '../src'
 
 const main = async () => {
+  console.log('Fetching gh group (top 100 DAOs with >= 10_000 followers)...')
   const ghGroup = await getGhGroup()
-  console.log(ghGroup)
+  console.log(`Fetched gh group of size: ${ghGroup.length}`, ghGroup)
 }
 
 main()

@@ -27,7 +27,7 @@ const main = async () => {
     maxOrgs: argv.maxOrgs,
     minFollowers: argv.minFollowers,
   })()
-  console.log(spaces)
+  console.log({ spaces })
 }
 
 main()

@@ -5,3 +5,5 @@ export const URLS = {
   SNAPSHOT_EXPLORE: `${BASE_URL}/api/explore`,
   SNAPSHOT_GQL: `${BASE_URL}/graphql`,
 }
+
+export const CHUNK_SIZE = 20

@@ -13,7 +13,6 @@ export const getCommittersByOrg = async ({
   since: Date
   until: Date
 }) => {
-  // console.log(parseDate(since), parseDate(until))
   ok(process.env.GH_PAT, 'GH_PAT is not defined')
   const res = await fetch(URLS.GH_SQL, {
     body: JSON.stringify({

@@ -31,8 +30,9 @@ export const getCommittersByOrg = async ({
     },
     method: 'POST',
   })
+  const repos = (await res.json()).data?.organization?.repositories?.nodes
 
-  const repos = (await res.json()).data.organization.repositories.nodes
+  if (repos === undefined) return []
 
   return [
     ...new Set(

@@ -1,7 +1,36 @@
-import { URLS } from './constants'
+import { CHUNK_SIZE, URLS } from './constants'
 import { getSpaces } from './get-spaces'
 import spacesGqlQuery from './graphql/spaces-gql-query'
 
+const split = (arr: string[]) => {
+  const chunks = []
+  for (let i = 0; i < arr.length; i += CHUNK_SIZE) {
+    chunks.push(arr.slice(i, i + CHUNK_SIZE))
+  }
+  return chunks
+}
+
+const getChunk = async (ids: string[]) => {
+  const res = await fetch(URLS.SNAPSHOT_GQL, {
+    body: JSON.stringify({
+      query: spacesGqlQuery,
+      variables: { id_in: ids },
+    }),
+    headers: {
+      'Content-Type': 'application/json',
+    },
+    method: 'POST',
+  })
+  const { data } = await res.json()
+
+  return (data.spaces as Array<{ github: string }>).reduce<string[]>(
+    (spaces, { github }) => {
+      if (github !== null) spaces.push(github)
+      return spaces
+    },
+    [],
+  )
+}
 export const getGhOrgs = async (
   {
     maxOrgs = 100,

@@ -15,23 +44,14 @@ export const getGhOrgs = async (
     ({ id }) => id,
   )
 
-  const res = await fetch(URLS.SNAPSHOT_GQL, {
-    body: JSON.stringify({
-      operationName: 'Spaces',
-      query: spacesGqlQuery,
-      variables: { id_in: spacesIds },
+  const result = new Set<string>()
+
+  await Promise.all(
+    split(spacesIds).map(async (ids) => {
+      const chunk = await getChunk(ids)
+      chunk.forEach((org) => result.add(org))
     }),
-    headers: { 'Content-Type': 'application/json' },
-    method: 'POST',
-  })
-
-  const { data } = await res.json()
-
-  return (data.spaces as Array<{ github: string }>).reduce<string[]>(
-    (spaces, { github }) => {
-      if (github !== null) spaces.push(github)
-      return spaces
-    },
-    [],
   )
+
+  return Array.from(result)
 }

@@ -10,7 +10,7 @@ export const getSpaces =
   (
     {
       maxOrgs = 100,
-      minFollowers = 10,
+      minFollowers = 10_000,
     }: { minFollowers: number; maxOrgs: number } = {
       maxOrgs: 100,
       minFollowers: 10_000,