update catalog fetch #187

Open · wants to merge 1 commit into main
40 changes: 27 additions & 13 deletions components/docs/DocsGallery.js
@@ -20,20 +20,34 @@ const DocsGallery = () => {
   const [docsMetadata, setDocsMetadata] = useState([]);
   const [viewMode, setViewMode] = useState('gallery');
 
+  const fetchDocsMetadata = async (range) => {
+    try {
+      const response = await fetch(`/api/docs?action=metadatalist&range=${range}`);
+      const data = await response.json();
+      return data.filter((doc) => doc.metadata.index !== 0);
+    } catch (error) {
+      console.error(
+        'Erreur lors de la récupération des métadonnées des documents',
+        error,
+      );
+      return [];
+    }
+  };
+
   useEffect(() => {
-    const fetchDocsMetadata = async () => {
-      try {
-        const response = await fetch('/api/docs?action=list');
-        const data = await response.json();
-        setDocsMetadata(data.filter((doc) => doc.metadata.index !== 0));
-      } catch (error) {
-        console.error(
-          'Erreur lors de la récupération des métadonnées des documents',
-          error,
-        );
-      }
-    };
-    fetchDocsMetadata();
+    // Fetch the first 8 documents
+    fetchDocsMetadata('1-10').then((initialDocs) => {
+      setDocsMetadata(initialDocs);
+    });
+
+    // Fetch the remaining documents after a delay
+    const timer = setTimeout(() => {
+      fetchDocsMetadata('11-1000').then((additionalDocs) => {
+        setDocsMetadata((prevDocs) => [...prevDocs, ...additionalDocs]);
+      });
+    }, 100); // 0.1 seconds delay
+
+    return () => clearTimeout(timer); // Cleanup the timer
   }, []);
 
   useEffect(() => {
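Read in isolation, the change to DocsGallery.js is a two-stage load: a small first batch is requested via action=metadatalist&range=1-10 and rendered, then the remaining documents are appended roughly 100 ms later. A minimal sketch of that pattern, assuming (as the handler in pages/api/docs.js further down suggests) that the endpoint returns a JSON array of { name, metadata } objects; this is an illustration, not part of the PR.

// Sketch only — mirrors the component above.
async function fetchDocsMetadata(range) {
  const response = await fetch(`/api/docs?action=metadatalist&range=${range}`);
  const data = await response.json();
  // Drop documents explicitly marked with index 0, as the component does.
  return data.filter((doc) => doc.metadata.index !== 0);
}

async function loadCatalogInStages(render) {
  // First batch: enough to fill the initial gallery view.
  const firstBatch = await fetchDocsMetadata('1-10');
  render(firstBatch);

  // Remaining documents: fetched shortly afterwards and appended.
  setTimeout(async () => {
    const rest = await fetchDocsMetadata('11-1000');
    render([...firstBatch, ...rest]);
  }, 100);
}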
2 changes: 1 addition & 1 deletion components/docs/FilteredDocsDisplay.js
@@ -8,7 +8,7 @@ const FilteredDocsDisplay = ({ docsList }) => {
   useEffect(() => {
     const fetchFilteredDocs = async () => {
       try {
-        const response = await fetch(`/api/docs?action=list`);
+        const response = await fetch(`/api/docs?action=metadatalist`);
         const data = await response.json();
         const filteredDocs = data.filter((doc) => docsList.includes(doc.name));
         setDocs(filteredDocs);
2 changes: 1 addition & 1 deletion components/docs/LastContent.js
@@ -7,7 +7,7 @@ const LastContent = () => {
 
   useEffect(() => {
     const fetchData = async () => {
-      const res = await fetch('/api/docs?action=list');
+      const res = await fetch('/api/docs?action=metadatalist');
       const data = await res.json();
       data.sort(
         (a, b) => new Date(b.metadata.date) - new Date(a.metadata.date),
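FilteredDocsDisplay.js and LastContent.js change only the action they query, from list to metadatalist. Based on the handler below, that action responds with a plain JSON array of { name, metadata } objects, which is what both components filter and sort. A hedged illustration of the expected shape; the document names and field values here are made up.

// Hypothetical response from GET /api/docs?action=metadatalist
// (shape inferred from pages/api/docs.js below; values are invented).
const exampleResponse = [
  { name: 'guide-opendata', metadata: { index: 3, date: '2024-05-01' } },
  { name: 'atelier-cartographie', metadata: { index: 7, date: '2024-03-12' } },
];

// LastContent sorts this array by metadata.date, newest first:
exampleResponse.sort(
  (a, b) => new Date(b.metadata.date) - new Date(a.metadata.date),
);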
122 changes: 47 additions & 75 deletions pages/api/docs.js
@@ -1,91 +1,63 @@
import fs from 'fs';
import path from 'path';
import matter from 'gray-matter';
import fetch from 'node-fetch';
import { supabase } from "../../utils/supabaseClient";
import matter from 'gray-matter';

export default async function handler(req, res) {
const { filename, action } = req.query;
const { action, range } = req.query;
const FILE_LIST_MD_URL = 'https://nextcloud.datactivist.coop/s/m68ztmJaaTiBitC/download';
const NEXTCLOUD_URL = 'https://nextcloud.datactivist.coop/s/Te2XrTkdnG9zgan/download?path=/&files=';

const GITHUB_TOKEN = process.env.GITHUB_TOKEN;
const REPO_PATH = 'datactivist/open-datactivist-private';
const BASE_URL = `https://api.github.com/repos/${REPO_PATH}/contents/docs`;

if (action === 'list') {
if (action === 'filelist') {
try {
const docsPath = path.join(process.cwd(), 'posts', 'docs');
const files = fs.readdirSync(docsPath);

const fileNames = files.map((file) => {
const filePath = path.join(docsPath, file);
const fileContent = fs.readFileSync(filePath, 'utf8');
const { data } = matter(fileContent);
return {
name: file.replace(/\.md$/, ''),
metadata: data,
};
});
const fileListResponse = await fetch(FILE_LIST_MD_URL);
if (!fileListResponse.ok) {
throw new Error(`Failed to fetch file list Markdown. Status: ${fileListResponse.status}`);
}

res.status(200).json(fileNames);
const fileListMd = await fileListResponse.text();
const fileList = fileListMd.split(',').map(name => name.trim()).filter(name => name !== '');
res.status(200).json({ fileList });
} catch (error) {
res
.status(500)
.json({ message: 'Erreur lors de la récupération des fichiers.' });
res.status(500).json({ message: `Error fetching file list Markdown: ${error.message}` });
}
} else {
} else if (action === 'metadatalist') {
try {
const filePath = path.join(
process.cwd(),
'posts',
'docs',
`${filename}.md`,
);

if (fs.existsSync(filePath)) {
const fileContent = fs.readFileSync(filePath, 'utf8');
const { data, content } = matter(fileContent);
res.status(200).json({ metadata: data, content });
} else {
// If file not found locally, try fetching from GitHub
const fileURL = `${BASE_URL}/${filename}.md`;
const response = await fetch(fileURL, {
headers: {
Authorization: `token ${GITHUB_TOKEN}`,
Accept: 'application/vnd.github.v3.raw',
},
});

if (!response.ok) {
throw new Error('Failed to fetch from GitHub API');
}

const content = await response.text();
const { data, content: parsedContent } = matter(content);
const fileListResponse = await fetch(FILE_LIST_MD_URL);
if (!fileListResponse.ok) {
throw new Error(`Failed to fetch file list Markdown. Status: ${fileListResponse.status}`);
}

// Special handling for "pv" files
if (filename.endsWith('pv') && data.access === 'datactivist-team') {
const user = req.user; // Assuming you have user in req
const fileListMd = await fileListResponse.text();
let fileList = fileListMd.split(',').map(name => name.trim()).filter(name => name !== '');

if (user && user.email) {
const { data: supabaseData, error } = await supabase
.from('docaccess-datactivist-team')
.select('email')
.eq('email', user.email);
// Parse the range and adjust fileList accordingly
if (range) {
const [start, end] = range.split('-').map(Number);
fileList = fileList.slice(start - 1, end);
}

if (error || !supabaseData.length) {
res.status(403).json({ message: 'Access denied.' });
return;
}
} else {
res.status(403).json({ message: 'Access denied. No user email found.' });
return;
}
}
const fetchPromises = fileList.map(filename => {
return fetch(`${NEXTCLOUD_URL}${filename}.md`)
.then(response => {
if (!response.ok) throw new Error(`Failed to fetch file ${filename}. Status: ${response.status}`);
return response.text();
})
.then(content => {
const { data } = matter(content);
return { name: filename.replace('.md', ''), metadata: data };
})
.catch(error => {
console.error(error.message);
return null; // Return null for failed requests
});
});

res.status(200).json({ metadata: data, content: parsedContent });
}
const metadataList = (await Promise.all(fetchPromises)).filter(item => item !== null);
res.status(200).json(metadataList);
} catch (error) {
res.status(500).json({ message: 'Erreur lors de la récupération du contenu.' });

res.status(500).json({ message: `Error processing metadata list: ${error.message}` });
}
} else {
res.status(400).json({ message: 'Invalid action' });
}
}
}
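The rewritten handler drops the local filesystem and GitHub/Supabase paths and exposes two actions backed by Nextcloud: filelist, which returns the comma-separated file list as { fileList }, and metadatalist, which fetches each listed Markdown file, parses its front matter with gray-matter, and returns an array of { name, metadata }. The optional range parameter is 1-indexed and inclusive, so range=1-10 becomes fileList.slice(0, 10). A short sketch of both calls and the slice arithmetic; URLs come from the diff, response shapes are inferred from the code and not guaranteed.

// Sketch of client calls against the new handler (for illustration only).
async function exampleUsage() {
  const { fileList } = await fetch('/api/docs?action=filelist').then((r) => r.json());
  // fileList: e.g. ['doc-one', 'doc-two', ...]

  const metadataList = await fetch('/api/docs?action=metadatalist&range=1-10').then((r) => r.json());
  // metadataList: e.g. [{ name: 'doc-one', metadata: { ... } }, ...]

  // How the handler applies the range: 1-indexed and inclusive,
  // so '1-10' becomes fileList.slice(0, 10).
  const [start, end] = '1-10'.split('-').map(Number); // [1, 10]
  console.log(fileList.slice(start - 1, end));        // first ten names
}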