-import * as core from '@actions/core';
-import { NotionEndpoints } from '@nishans/endpoints';
-import { ICollection, TCollectionBlock } from '@nishans/types';
-import fs from 'fs';
-import { checkForSections } from './utils/checkForSections';
-import { commitFile } from './utils/commitFile';
-import { constructCategoriesMap } from './utils/constructCategoriesMap';
-import { constructNewContents } from './utils/constructNewContents';
-import { fetchData } from './utils/fetchData';
-import { getSchemaEntries } from './utils/getSchemaEntries';
-import { modifyRows } from './utils/modifyRows';
+import { action } from './action';

-async function main() {
-  try {
-    const NOTION_TOKEN_V2 = core.getInput('token_v2');
-    const databaseId = core.getInput('database_id');
-
-    const collectionView = await fetchData<TCollectionBlock>(
-      databaseId,
-      'block'
-    );
-    core.info('Fetched database');
-
-    const collection_id = collectionView.collection_id;
-    const collection = await fetchData<ICollection>(
-      collection_id,
-      'collection'
-    );
-
-    core.info('Fetched collection');
-
-    const { recordMap } = await NotionEndpoints.Queries.queryCollection(
-      {
-        collectionId: collection_id,
-        collectionViewId: '',
-        query: {},
-        loader: {
-          type: 'table',
-          loadContentCover: false,
-          limit: 10000,
-          userTimeZone: ''
-        }
-      },
-      {
-        token: NOTION_TOKEN_V2,
-        user_id: ''
-      }
-    );
-
-    core.info('Fetched rows');
-    const { schema } = collection;
-    const [category_schema_entry, color_schema_entry] = getSchemaEntries(
-      schema
-    );
-
-    const rows = modifyRows(recordMap, databaseId);
-
-    if (rows.length === 0) return core.error('No database rows detected');
-    else {
-      const categories_map = constructCategoriesMap(category_schema_entry[1]);
-      rows.forEach((row) => {
-        const category = row.properties[category_schema_entry[0]][0][0];
-        if (!category) throw new Error('Each row must have a category value');
-        const category_value = categories_map.get(category);
-        category_value!.items.push(row.properties);
-      });
-
-      const README_PATH = `${process.env.GITHUB_WORKSPACE}/README.md`;
-      core.info(`Reading from ${README_PATH}`);
-
-      const readmeLines = fs.readFileSync(README_PATH, 'utf-8').split('\n');
-
-      const [startIdx, endIdx] = checkForSections(readmeLines);
-      const newLines = constructNewContents(
-        categories_map,
-        color_schema_entry[0]
-      );
-
-      const finalLines = [
-        ...readmeLines.slice(0, startIdx + 1),
-        ...newLines,
-        ...readmeLines.slice(endIdx)
-      ];
-
-      core.info(`Writing to ${README_PATH}`);
-
-      fs.writeFileSync(README_PATH, finalLines.join('\n'), 'utf-8');
-
-      try {
-        await commitFile();
-      } catch (err) {
-        core.setFailed(err.message);
-      }
-    }
-  } catch (error) {
-    core.setFailed(error.message);
-  }
-}
-
-main();
+action();
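
Note: the new `./action.ts` module is not shown in this diff. Presumably the removed `main()` body was moved there more or less verbatim and exported as an async `action` function, leaving the entry point as a one-liner. A minimal sketch of that assumed shape, using only names visible in the deleted code:

// ./action.ts (assumed structure, not part of this diff)
import * as core from '@actions/core';

export async function action() {
  try {
    // ...the fetch -> transform -> README rewrite -> commit pipeline
    // that previously lived in main()
  } catch (error: any) {
    core.setFailed(error.message);
  }
}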