
/ Table of Contents /
- NextJS Client
- NextJS Server
- Simple NextJS test screen using the Llama3 API
NextJS Client
- Source code for a simple AI chat screen built with NextJS.

code
"use client"

import { useState } from 'react';
import '@/app/styles/ai.css'

async function getAiData(aiMessage: any) {
  // Encode the message so special characters survive the query string.
  const url = `/api/getapiai?aiMessage=${encodeURIComponent(aiMessage)}`;
  try {
    const response = await fetch(url);
    return response;
  } catch (error) {
    console.error('Error:', error);
    throw error;
  }
}

export default function Aichat() {
  const [response, setResponse] = useState<string | null>(null);
  const [message, setMessage] = useState('');
  const [inputValue, setInputValue] = useState('');
  const [isLoading, setIsLoading] = useState(false);

  const fetchData = async () => {
    try {
      const res = await getAiData(inputValue);
      if (!res.ok) {
        throw new Error('Network response was not ok');
      }
      const data = await res.json();
      setResponse(data.message.content);
      setMessage(inputValue);
    } catch (error) {
      console.error('Error fetching data:', error);
    } finally {
      setIsLoading(false); // stop the loading indicator once the fetch finishes
    }
  };

  const handleKeyDown = (event: any) => {
    if (event.key === 'Enter') {
      // Submit on Enter instead of inserting a newline.
      setIsLoading(true);
      fetchData();
      setInputValue('');
      event.preventDefault();
    }
  };

  return (
    <div className="AiSearchWrapper">
      <div className="AiSearchForm">
        <div className="AidataSearch">{'<AI>'}</div>
        <textarea
          value={inputValue}
          onChange={(event) => setInputValue(event.target.value)}
          onKeyDown={handleKeyDown}
          placeholder="Ask Llama3 anything..."
        />
        <button onClick={fetchData}></button>
        <div className="AidataSearch">{'</AI>'}</div>
      </div>
      <div className="AiMessageWrapper">
        <div className="AiMessage">
          <pre className="pre-container">User: {isLoading ? 'Loading...' : (message ? message : 'No message')}</pre>
        </div>
        <div className="AiMessage">
          <pre className="pre-container">Llama3: {isLoading ? 'Loading...' : (response ? response : 'No response')}</pre>
        </div>
      </div>
    </div>
  );
}
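The client reads data.message.content out of whatever JSON the API route returns. For reference, below is a minimal sketch, not part of the original source, of the shape that JSON is expected to have, based on the non-streaming Ollama /api/chat response that the server route in the next section forwards unchanged; the exact field list is an assumption.

code
// Assumed shape of the JSON the client receives from /api/getapiai.
interface OllamaChatMessage {
  role: 'system' | 'user' | 'assistant';
  content: string;
}

interface OllamaChatResponse {
  model: string;
  created_at: string;
  message: OllamaChatMessage; // setResponse(data.message.content) reads message.content
  done: boolean;
}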
NextJS Server
Filename: getapiai.tsx (a Pages Router API route, so it is served at /api/getapiai)

code
import { NextApiRequest, NextApiResponse } from 'next';

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    const { aiMessage } = req.query;

    // Forward the user's message to the local Ollama chat endpoint.
    const response = await fetch(`http://localhost:11434/api/chat`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        model: 'llama3',
        messages: [
          { "role": "system", "content": "Use only Korean." },
          { "role": "user", "content": aiMessage }
        ],
        stream: false
      })
    });

    // Parse the JSON body returned by Ollama.
    const data = await response.json();

    // Return the parsed data to the client.
    res.status(200).json(data);
  } catch (error) {
    console.error(error);
    res.status(500).json({ message: "Error Occurred" });
  }
}
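Before testing through NextJS, it can help to confirm that the local Ollama server is reachable at all. The snippet below is a minimal standalone sketch, not part of the original post: it calls the same http://localhost:11434/api/chat endpoint directly and assumes Ollama is running locally with the llama3 model already pulled (Node 18+ for the global fetch).

code
// Standalone sanity check against the local Ollama chat endpoint.
async function main() {
  const response = await fetch('http://localhost:11434/api/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      model: 'llama3',
      messages: [{ role: 'user', content: 'Say hello in Korean.' }],
      stream: false, // one JSON object instead of a stream of chunks
    }),
  });
  const data = await response.json();
  console.log(data.message?.content); // the assistant's reply text
}

main().catch(console.error);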
Simple NextJS test screen using the Llama3 API
- https://www.datacafe.kr/ai